Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
Merge branch 'develop' into enhancement/create_context_typing

Commit dd0f8f4ca3
158 changed files with 158 additions and 17631 deletions
.gitignore (vendored, 1 change)

@@ -34,7 +34,6 @@ Temporary Items
 
 # Package dirs
 ###########
-/server_addon/packages/*
 /package/*
 
 /.venv
@@ -181,25 +181,23 @@ class HostDirmap(object):
             cached=False)
 
         # overrides for roots set in `Site Settings`
-        active_roots = sitesync_addon.get_site_root_overrides(
-            project_name, active_site)
-        remote_roots = sitesync_addon.get_site_root_overrides(
-            project_name, remote_site)
-
-        self.log.debug("active roots overrides {}".format(active_roots))
-        self.log.debug("remote roots overrides {}".format(remote_roots))
+        active_roots_overrides = self._get_site_root_overrides(
+            sitesync_addon, project_name, active_site)
+        remote_roots_overrides = self._get_site_root_overrides(
+            sitesync_addon, project_name, remote_site)
 
         current_platform = platform.system().lower()
         remote_provider = sitesync_addon.get_provider_for_site(
             project_name, remote_site
         )
         # dirmap has sense only with regular disk provider, in the workfile
-        # won't be root on cloud or sftp provider
+        # won't be root on cloud or sftp provider so fallback to studio
         if remote_provider != "local_drive":
             remote_site = "studio"
-        for root_name, active_site_dir in active_roots.items():
+        for root_name, active_site_dir in active_roots_overrides.items():
             remote_site_dir = (
-                remote_roots.get(root_name)
+                remote_roots_overrides.get(root_name)
                 or sync_settings["sites"][remote_site]["root"][root_name]
             )

@@ -220,3 +218,22 @@ class HostDirmap(object):
         self.log.debug("local sync mapping:: {}".format(mapping))
         return mapping
+
+    def _get_site_root_overrides(
+        self, sitesync_addon, project_name, site_name
+    ):
+        """Safely handle root overrides.
+
+        SiteSync raises ValueError for non local or studio sites.
+        """
+        # TODO: could be removed when `get_site_root_overrides` is not raising
+        # an Error but just returns {}
+        try:
+            site_roots_overrides = sitesync_addon.get_site_root_overrides(
+                project_name, site_name)
+        except ValueError:
+            site_roots_overrides = {}
+        self.log.debug("{} roots overrides {}".format(
+            site_name, site_roots_overrides))
+
+        return site_roots_overrides
@@ -978,7 +978,7 @@ def _ffmpeg_h264_codec_args(stream_data, source_ffmpeg_cmd):
     if pix_fmt:
         output.extend(["-pix_fmt", pix_fmt])
 
-    output.extend(["-intra", "-g", "1"])
+    output.extend(["-g", "1"])
     return output
@@ -14,6 +14,7 @@ import pyblish.api
 import ayon_api
 
 from ayon_core.settings import get_project_settings
+from ayon_core.lib import is_func_signature_supported
 from ayon_core.lib.attribute_definitions import (
     UnknownDef,
     serialize_attr_defs,
@@ -1405,6 +1406,7 @@ class CreateContext:
         self._current_workfile_path = None
         self._current_project_settings = None
 
+        self._current_project_entity = _NOT_SET
         self._current_folder_entity = _NOT_SET
         self._current_task_entity = _NOT_SET
         self._current_task_type = _NOT_SET
@@ -1593,6 +1595,22 @@ class CreateContext:
         self._current_task_type = task_type
         return self._current_task_type
 
+    def get_current_project_entity(self):
+        """Project entity for current context project.
+
+        Returns:
+            Union[dict[str, Any], None]: Project entity.
+
+        """
+        if self._current_project_entity is not _NOT_SET:
+            return copy.deepcopy(self._current_project_entity)
+        project_entity = None
+        project_name = self.get_current_project_name()
+        if project_name:
+            project_entity = ayon_api.get_project(project_name)
+        self._current_project_entity = project_entity
+        return copy.deepcopy(self._current_project_entity)
+
     def get_current_folder_entity(self):
         """Folder entity for current context folder.
 
@@ -1789,6 +1807,7 @@ class CreateContext:
         self._current_task_name = task_name
         self._current_workfile_path = workfile_path
 
+        self._current_project_entity = _NOT_SET
         self._current_folder_entity = _NOT_SET
         self._current_task_entity = _NOT_SET
         self._current_task_type = _NOT_SET
@@ -2084,13 +2103,22 @@ class CreateContext:
         # TODO validate types
         _pre_create_data.update(pre_create_data)
 
-        product_name = creator.get_product_name(
+        project_entity = self.get_current_project_entity()
+        args = (
             project_name,
             folder_entity,
             task_entity,
             variant,
             self.host_name,
         )
+        kwargs = {"project_entity": project_entity}
+        # Backwards compatibility for 'project_entity' argument
+        # - 'get_product_name' signature changed 24/07/08
+        if not is_func_signature_supported(
+            creator.get_product_name, *args, **kwargs
+        ):
+            kwargs.pop("project_entity")
+        product_name = creator.get_product_name(*args, **kwargs)
 
         instance_data = {
             "folderPath": folder_entity["path"],
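The guard above relies on `is_func_signature_supported` to decide whether a (possibly older) Creator implementation accepts the new `project_entity` keyword before passing it. A minimal sketch of how such a check can work, assuming an `inspect.signature`-based implementation (the actual `ayon_core.lib` helper may differ):

```python
import inspect


def is_func_signature_supported(func, *args, **kwargs):
    """Return True if 'func' can be called with the given arguments.

    This only binds the arguments against the signature; it does not
    call the function. (Sketch; assumed behavior of the real helper.)
    """
    try:
        inspect.signature(func).bind(*args, **kwargs)
    except TypeError:
        return False
    return True


# Hypothetical legacy callback that predates the new keyword argument.
def legacy_get_product_name(project_name, folder_entity, task_entity,
                            variant, host_name):
    return "{}{}".format(variant, host_name)


# Binding fails on the unexpected keyword, so the caller would drop it.
assert not is_func_signature_supported(
    legacy_get_product_name, "proj", {}, {}, "Main", "maya",
    project_entity={},
)
```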
@@ -303,7 +303,6 @@ class BaseCreator:
         ))
         setattr(self, key, value)
 
-
     @property
     def identifier(self):
         """Identifier of creator (must be unique).
@@ -498,7 +497,8 @@ class BaseCreator:
         task_entity,
         variant,
         host_name=None,
-        instance=None
+        instance=None,
+        project_entity=None,
     ):
         """Return product name for passed context.
 
@@ -515,8 +515,9 @@ class BaseCreator:
             instance (Optional[CreatedInstance]): Object of 'CreatedInstance'
                 for which is product name updated. Passed only on product name
                 update.
-        """
+            project_entity (Optional[dict[str, Any]]): Project entity.
+
+        """
         if host_name is None:
             host_name = self.create_context.host_name
 
@@ -542,7 +543,8 @@ class BaseCreator:
             self.product_type,
             variant,
             dynamic_data=dynamic_data,
-            project_settings=self.project_settings
+            project_settings=self.project_settings,
+            project_entity=project_entity,
         )
 
     def get_instance_attr_defs(self):
@@ -1,3 +1,5 @@
+import ayon_api
+
 from ayon_core.settings import get_project_settings
 from ayon_core.lib import filter_profiles, prepare_template_data
 
@@ -88,6 +90,7 @@ def get_product_name(
     dynamic_data=None,
     project_settings=None,
     product_type_filter=None,
+    project_entity=None,
 ):
     """Calculate product name based on passed context and AYON settings.
 
@@ -120,12 +123,18 @@ def get_product_name(
         product_type_filter (Optional[str]): Use different product type for
             product template filtering. Value of `product_type` is used when
             not passed.
+        project_entity (Optional[Dict[str, Any]]): Project entity used when
+            task short name is required by template.
 
     Returns:
         str: Product name.
-    """
+
+    Raises:
+        TaskNotSetError: If template requires task which is not provided.
+        TemplateFillError: If filled template contains placeholder key which
+            is not collected.
+
+    """
     if not product_type:
         return ""
 
@@ -150,6 +159,16 @@ def get_product_name(
     if "{task}" in template.lower():
         task_value = task_name
 
+    elif "{task[short]}" in template.lower():
+        if project_entity is None:
+            project_entity = ayon_api.get_project(project_name)
+        task_types_by_name = {
+            task["name"]: task for task in
+            project_entity["taskTypes"]
+        }
+        task_short = task_types_by_name.get(task_type, {}).get("shortName")
+        task_value["short"] = task_short
+
     fill_pairs = {
         "variant": variant,
         "family": product_type,
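The added `{task[short]}` branch resolves a task type's short name from the project entity only when the template actually asks for it. A small illustration of the lookup, using a hypothetical project entity whose shape matches the code above ("taskTypes" as a list of dicts with "name" and "shortName"):

```python
# Hypothetical project entity payload.
project_entity = {
    "taskTypes": [
        {"name": "Animation", "shortName": "anim"},
        {"name": "Compositing", "shortName": "comp"},
    ]
}

task_types_by_name = {
    task["name"]: task for task in project_entity["taskTypes"]
}
# Unknown task types safely resolve to None instead of raising KeyError.
assert task_types_by_name.get("Animation", {}).get("shortName") == "anim"
assert task_types_by_name.get("Lighting", {}).get("shortName") is None
```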
@@ -587,6 +587,21 @@ def switch_container(container, representation, loader_plugin=None):
     return loader.switch(container, context)
 
 
+def _fix_representation_context_compatibility(repre_context):
+    """Helper function to fix representation context compatibility.
+
+    Args:
+        repre_context (dict): Representation context.
+
+    """
+    # Auto-fix 'udim' being list of integers
+    # - This is a legacy issue for old representation entities,
+    #   added 24/07/10
+    udim = repre_context.get("udim")
+    if isinstance(udim, list):
+        repre_context["udim"] = udim[0]
+
+
 def get_representation_path_from_context(context):
     """Preparation wrapper using only context as a argument"""
     from ayon_core.pipeline import get_current_project_name
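For old representation entities the stored `udim` value may still be a list of integers; the helper added above normalizes it in place. A minimal usage sketch with hypothetical data:

```python
# Legacy entity: 'udim' stored as a list of integers.
repre_context = {"ext": "exr", "udim": [1001]}
_fix_representation_context_compatibility(repre_context)
assert repre_context["udim"] == 1001

# Already-normalized contexts are left untouched.
repre_context = {"ext": "exr", "udim": 1001}
_fix_representation_context_compatibility(repre_context)
assert repre_context["udim"] == 1001
```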
@@ -638,7 +653,9 @@ def get_representation_path_with_anatomy(repre_entity, anatomy):
 
     try:
         context = repre_entity["context"]
+        _fix_representation_context_compatibility(context)
+
         context["root"] = anatomy.roots
         path = StringTemplate.format_strict_template(template, context)
 
     except TemplateUnsolved as exc:
@@ -681,6 +698,9 @@ def get_representation_path(representation, root=None):
 
     try:
         context = representation["context"]
+
+        _fix_representation_context_compatibility(context)
+
         context["root"] = root
         path = StringTemplate.format_strict_template(
             template, context
@@ -3,7 +3,7 @@
 Build templates are manually prepared using plugin definitions which create
 placeholders inside the template which are populated on import.
 
-This approach is very explicit to achive very specific build logic that can be
+This approach is very explicit to achieve very specific build logic that can be
 targeted by task types and names.
 
 Placeholders are created using placeholder plugins which should care about
@@ -87,7 +87,7 @@ class AbstractTemplateBuilder(object):
     """Abstraction of Template Builder.
 
     Builder cares about context, shared data, cache, discovery of plugins
-    and trigger logic. Provides public api for host workfile build systen.
+    and trigger logic. Provides public api for host workfile build system.
 
     Rest of logic is based on plugins that care about collection and creation
     of placeholder items.
@@ -806,7 +806,7 @@ class AbstractTemplateBuilder(object):
         )
 
     def get_template_preset(self):
-        """Unified way how template preset is received usign settings.
+        """Unified way how template preset is received using settings.
 
         Method is dependent on '_get_build_profiles' which should return filter
         profiles to resolve path to a template. Default implementation looks
@@ -1427,7 +1427,7 @@ class PlaceholderLoadMixin(object):
                 placeholder='{"camera":"persp", "lights":True}',
                 tooltip=(
                     "Loader"
-                    "\nDefines a dictionnary of arguments used to load assets."
+                    "\nDefines a dictionary of arguments used to load assets."
                     "\nUseable arguments depend on current placeholder Loader."
                     "\nField should be a valid python dict."
                     " Anything else will be ignored."
@@ -1472,7 +1472,7 @@ class PlaceholderLoadMixin(object):
         ]
 
     def parse_loader_args(self, loader_args):
-        """Helper function to parse string of loader arugments.
+        """Helper function to parse string of loader arguments.
 
         Empty dictionary is returned if conversion fails.
 
@@ -1872,7 +1872,7 @@ class PlaceholderCreateMixin(object):
             creator_plugin.identifier,
             create_variant,
             folder_entity,
-            task_name=task_name,
+            task_entity,
             pre_create_data=pre_create_data
         )
 
@@ -1,4 +1,5 @@
 import ayon_api
+import ayon_api.utils
 import pyblish.api
 
 
@@ -23,6 +24,12 @@ class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin):
             if inst_repre:
                 representations.update(inst_repre)
 
+        # Ignore representation ids that are not valid
+        representations = {
+            representation_id for representation_id in representations
+            if ayon_api.utils.convert_entity_id(representation_id)
+        }
+
         repre_entities = ayon_api.get_representations(
             project_name=context.data["projectName"],
             representation_ids=representations,
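Both collectors in this commit use the same guard: the comprehension keeps an id only when `ayon_api.utils.convert_entity_id` returns a truthy (normalized) value, so invalid ids gathered from scene data are silently dropped before querying the server. A short sketch of the pattern with hypothetical inputs (the exact validity rules of the `ayon_api` helper are an assumption here):

```python
import ayon_api.utils

# Hypothetical ids collected from a scene; only valid entity ids
# should survive the filtering.
representations = {
    "9a1b2c3d4e5f46789abc0def12345678",  # plausible entity id
    "not-an-entity-id",                  # invalid, expected to be dropped
}
representations = {
    representation_id for representation_id in representations
    if ayon_api.utils.convert_entity_id(representation_id)
}
```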
@@ -1,7 +1,8 @@
 import ayon_api
-import pyblish.api
+import ayon_api.utils
 
 from ayon_core.pipeline import registered_host
+import pyblish.api
 
 
 class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
@@ -41,6 +42,12 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
             for container in containers
         }
 
+        # Ignore representation ids that are not valid
+        repre_ids = {
+            representation_id for representation_id in repre_ids
+            if ayon_api.utils.convert_entity_id(representation_id)
+        }
+
         project_name = context.data["projectName"]
         repre_entities = ayon_api.get_representations(
             project_name,
@@ -65,7 +72,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
                 continue
 
             # NOTE:
-            # may have more then one representation that are same version
+            # may have more than one representation that are same version
             version = {
                 "container_name": con["name"],
                 "representation_id": repre_entity["id"],
@@ -789,11 +789,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             if value is not None:
                 repre_context[key] = value
 
-        # Explicitly store the full list even though template data might
-        # have a different value because it uses just a single udim tile
-        if repre.get("udim"):
-            repre_context["udim"] = repre.get("udim")  # store list
-
         # Use previous representation's id if there is a name match
         existing = existing_repres_by_name.get(repre["name"].lower())
         repre_id = None
@@ -166,6 +166,12 @@ class AbstractPublisherBackend(AbstractPublisherCommon):
     ) -> Union[TaskItem, None]:
         pass
 
+    @abstractmethod
+    def get_project_entity(
+        self, project_name: str
+    ) -> Union[Dict[str, Any], None]:
+        pass
+
     @abstractmethod
     def get_folder_entity(
         self, project_name: str, folder_id: str
@@ -193,6 +193,9 @@ class PublisherController(
     def get_convertor_items(self):
         return self._create_model.get_convertor_items()
 
+    def get_project_entity(self, project_name):
+        return self._projects_model.get_project_entity(project_name)
+
     def get_folder_type_items(self, project_name, sender=None):
         return self._projects_model.get_folder_type_items(
             project_name, sender
@@ -9,6 +9,7 @@ from ayon_core.lib.attribute_definitions import (
 )
 from ayon_core.lib.profiles_filtering import filter_profiles
 from ayon_core.lib.attribute_definitions import UIDef
+from ayon_core.lib import is_func_signature_supported
 from ayon_core.pipeline.create import (
     BaseCreator,
     AutoCreator,
@@ -26,6 +27,7 @@ from ayon_core.tools.publisher.abstract import (
     AbstractPublisherBackend,
     CardMessageTypes,
 )
 
+CREATE_EVENT_SOURCE = "publisher.create.model"
 
 
@@ -356,13 +358,24 @@ class CreateModel:
             project_name, task_item.task_id
         )
 
-        return creator.get_product_name(
+        project_entity = self._controller.get_project_entity(project_name)
+        args = (
             project_name,
             folder_entity,
             task_entity,
-            variant,
-            instance=instance
+            variant
         )
+        kwargs = {
+            "instance": instance,
+            "project_entity": project_entity,
+        }
+        # Backwards compatibility for 'project_entity' argument
+        # - 'get_product_name' signature changed 24/07/08
+        if not is_func_signature_supported(
+            creator.get_product_name, *args, **kwargs
+        ):
+            kwargs.pop("project_entity")
+        return creator.get_product_name(*args, **kwargs)
 
     def create(
         self,
@@ -83,7 +83,7 @@ class UpdateDialog(QtWidgets.QDialog):
         top_layout.addWidget(label_widget, 1)
 
         ignore_btn = QtWidgets.QPushButton("Ignore", self)
-        restart_btn = QtWidgets.QPushButton("Restart && Change", self)
+        restart_btn = QtWidgets.QPushButton("Restart && Update", self)
         restart_btn.setObjectName("TrayRestartButton")
 
         btns_layout = QtWidgets.QHBoxLayout()
@@ -485,7 +485,10 @@ class _IconsCache:
             parts = [icon_type, icon_def["path"]]
 
         elif icon_type in {"awesome-font", "material-symbols"}:
-            parts = [icon_type, icon_def["name"], icon_def["color"]]
+            color = icon_def["color"] or ""
+            if isinstance(color, QtGui.QColor):
+                color = color.name()
+            parts = [icon_type, icon_def["name"] or "", color]
         return "|".join(parts)
 
     @classmethod
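The cache key is built with a string join, so a raw `QtGui.QColor` in `parts` would raise a TypeError; normalizing to `QColor.name()` keeps the key a stable string. A minimal sketch (assuming a Qt binding such as qtpy is available):

```python
from qtpy import QtGui

color = QtGui.QColor("red")
print(color.name())  # "#ff0000"

# Joining non-string values is what the fix avoids:
# "|".join(["awesome-font", "fa.cog", color])  # would raise TypeError
parts = ["awesome-font", "fa.cog", color.name()]
print("|".join(parts))  # "awesome-font|fa.cog|#ff0000"
```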
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring AYON core addon version."""
-__version__ = "0.4.1-dev.1"
+__version__ = "0.4.2-dev.1"
@@ -1,6 +1,6 @@
 name = "core"
 title = "Core"
-version = "0.4.1-dev.1"
+version = "0.4.2-dev.1"
 
 client_dir = "ayon_core"
 
@@ -5,7 +5,7 @@
 
 [tool.poetry]
 name = "ayon-core"
-version = "0.3.1"
+version = "0.4.2-dev.1"
 description = ""
 authors = ["Ynput Team <team@ynput.io>"]
 readme = "README.md"

@@ -79,11 +79,7 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
 
 exclude = [
     "client/ayon_core/modules/click_wrap.py",
-    "client/ayon_core/scripts/slates/__init__.py",
-    "server_addon/deadline/client/ayon_deadline/repository/custom/plugins/CelAction/*",
-    "server_addon/deadline/client/ayon_deadline/repository/custom/plugins/HarmonyAYON/*",
-    "server_addon/hiero/client/ayon_hiero/api/startup/*",
-    "server_addon/aftereffects/client/ayon_aftereffects/api/extension/js/libs/*"
+    "client/ayon_core/scripts/slates/__init__.py"
 ]
 
 [tool.ruff.lint.per-file-ignores]
@@ -1,34 +0,0 @@
# Addons for AYON server
Preparation of AYON addons based on the OpenPype codebase. The output is a bunch of zip files in the `./packages` directory that can be uploaded to an AYON server. One of the packages is `openpype`, which is the OpenPype code converted to an AYON addon. The addon is a must-have requirement to be able to use `ayon-launcher`. The versioning of the `openpype` addon follows the versioning of OpenPype. The other addons contain only settings models.

## Intro
OpenPype is transitioning to AYON, a dedicated server with its own database, moving away from MongoDB. During this transition period, OpenPype will remain compatible with both MongoDB and AYON. However, we will gradually update the codebase to align with AYON's data structure and separate individual components into addons.

Currently, OpenPype has an AYON mode, which means it utilizes the AYON server instead of MongoDB through conversion utilities. Initially, we added the AYON executable alongside the OpenPype executables to enable AYON mode. While this approach worked, updating to new code versions would require a complete reinstallation. To address this, we have decided to create a new repository specifically for the base desktop application logic, which we currently refer to as the AYON Launcher. This Launcher will replace the executables generated by the OpenPype build and convert the OpenPype code into a server addon, resulting in smaller updates.

Since the implementation of the AYON Launcher is not yet fully completed, we will maintain both methods of starting AYON mode for now. Once the AYON Launcher is finished, we will remove the AYON executables from the OpenPype codebase entirely.

During this transitional period, the AYON Launcher addon will be a requirement as the entry point for using the AYON Launcher.

## How to start
There is a `create_ayon_addons.py` python file which contains the logic for creating server addons from the OpenPype codebase. Just run it:
```shell
./.poetry/bin/poetry run python ./server_addon/create_ayon_addons.py
```

It will create `./packages/<addon name>.zip` files for the AYON server. You can then upload the zip files to the AYON server. Restart the server to update addon information, add the addon version to a server bundle and set the bundle for production or staging usage.

Once an addon is on the server and enabled, you can just run the AYON launcher. Content will be downloaded and used automatically.

### Additional arguments
Additional arguments are useful for development purposes.

To skip zip creation and keep only the server-ready folder structure, pass the `--skip-zip` argument.
```shell
./.poetry/bin/poetry run python ./server_addon/create_ayon_addons.py --skip-zip
```

To create zips and also keep the folder structure, pass the `--keep-sources` argument.
```shell
./.poetry/bin/poetry run python ./server_addon/create_ayon_addons.py --keep-sources
```
@@ -1,376 +0,0 @@
import io
import os
import sys
import re
import shutil
import argparse
import zipfile
import types
import importlib.machinery
import platform
import collections
from pathlib import Path
from typing import Optional, Iterable, Pattern, List, Tuple

# Patterns of directories to be skipped for server part of addon
IGNORE_DIR_PATTERNS: List[Pattern] = [
    re.compile(pattern)
    for pattern in {
        # Skip directories starting with '.'
        r"^\.",
        # Skip any pycache folders
        "^__pycache__$"
    }
]

# Patterns of files to be skipped for server part of addon
IGNORE_FILE_PATTERNS: List[Pattern] = [
    re.compile(pattern)
    for pattern in {
        # Skip files starting with '.'
        # NOTE this could be an issue in some cases
        r"^\.",
        # Skip '.pyc' files
        r"\.pyc$"
    }
]

IGNORED_HOSTS = [
    "flame",
    "harmony",
]

IGNORED_MODULES = []

PACKAGE_PY_TEMPLATE = """name = "{addon_name}"
version = "{addon_version}"
plugin_for = ["ayon_server"]
"""

CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*-
"""Package declaring AYON addon '{}' version."""
__version__ = "{}"
'''


class ZipFileLongPaths(zipfile.ZipFile):
    """Allows longer paths in zip files.

    Regular DOS paths are limited to MAX_PATH (260) characters, including
    the string's terminating NUL character.
    That limit can be exceeded by using an extended-length path that
    starts with the '\\?\' prefix.
    """
    _is_windows = platform.system().lower() == "windows"

    def _extract_member(self, member, tpath, pwd):
        if self._is_windows:
            tpath = os.path.abspath(tpath)
            if tpath.startswith("\\\\"):
                tpath = "\\\\?\\UNC\\" + tpath[2:]
            else:
                tpath = "\\\\?\\" + tpath

        return super()._extract_member(member, tpath, pwd)


def _value_match_regexes(value: str, regexes: Iterable[Pattern]) -> bool:
    return any(
        regex.search(value)
        for regex in regexes
    )


def find_files_in_subdir(
    src_path: str,
    ignore_file_patterns: Optional[List[Pattern]] = None,
    ignore_dir_patterns: Optional[List[Pattern]] = None,
    include_empty_dirs: bool = True
):
    """Find all files to copy in subdirectories of given path.

    All files that match any of the patterns in 'ignore_file_patterns' will
    be skipped and any directories that match any of the patterns in
    'ignore_dir_patterns' will be skipped with all subfiles.

    Args:
        src_path (str): Path to directory to search in.
        ignore_file_patterns (Optional[List[Pattern]]): List of regexes
            to match files to ignore.
        ignore_dir_patterns (Optional[List[Pattern]]): List of regexes
            to match directories to ignore.
        include_empty_dirs (Optional[bool]): Do not skip empty directories.

    Returns:
        List[Tuple[str, str]]: List of tuples with path to file and parent
            directories relative to 'src_path'.
    """
    if not os.path.exists(src_path):
        return []

    if ignore_file_patterns is None:
        ignore_file_patterns = IGNORE_FILE_PATTERNS

    if ignore_dir_patterns is None:
        ignore_dir_patterns = IGNORE_DIR_PATTERNS
    output: List[Tuple[str, str]] = []

    hierarchy_queue = collections.deque()
    hierarchy_queue.append((src_path, []))
    while hierarchy_queue:
        item: Tuple[str, List[str]] = hierarchy_queue.popleft()
        dirpath, parents = item
        subnames = list(os.listdir(dirpath))
        if not subnames and include_empty_dirs:
            output.append((dirpath, os.path.sep.join(parents)))

        for name in subnames:
            path = os.path.join(dirpath, name)
            if os.path.isfile(path):
                if not _value_match_regexes(name, ignore_file_patterns):
                    items = list(parents)
                    items.append(name)
                    output.append((path, os.path.sep.join(items)))
                continue

            if not _value_match_regexes(name, ignore_dir_patterns):
                items = list(parents)
                items.append(name)
                hierarchy_queue.append((path, items))

    return output


def create_addon_zip(
    output_dir: Path,
    addon_name: str,
    addon_version: str,
    files_mapping: List[Tuple[str, str]],
    client_zip_content: io.BytesIO
):
    zip_filepath = output_dir / f"{addon_name}-{addon_version}.zip"

    with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf:
        for src_path, dst_subpath in files_mapping:
            zipf.write(src_path, dst_subpath)

        if client_zip_content is not None:
            zipf.writestr("private/client.zip", client_zip_content.getvalue())


def prepare_client_zip(
    addon_dir: Path,
    addon_name: str,
    addon_version: str,
    client_dir: str
):
    if not client_dir:
        return None
    client_dir_obj = addon_dir / "client" / client_dir
    if not client_dir_obj.exists():
        return None

    # Update version.py with server version if 'version.py' is available
    version_path = client_dir_obj / "version.py"
    if version_path.exists():
        with open(version_path, "w") as stream:
            stream.write(
                CLIENT_VERSION_CONTENT.format(addon_name, addon_version)
            )

    zip_content = io.BytesIO()
    with ZipFileLongPaths(zip_content, "a", zipfile.ZIP_DEFLATED) as zipf:
        # Add client code content to zip
        for path, sub_path in find_files_in_subdir(
            str(client_dir_obj), include_empty_dirs=False
        ):
            sub_path = os.path.join(client_dir, sub_path)
            zipf.write(path, sub_path)

    zip_content.seek(0)
    return zip_content


def import_filepath(path: Path, module_name: Optional[str] = None):
    if not module_name:
        module_name = os.path.splitext(path.name)[0]

    # Convert to string
    path = str(path)
    module = types.ModuleType(module_name)
    module.__file__ = path

    # Use loader so module has full specs
    module_loader = importlib.machinery.SourceFileLoader(
        module_name, path
    )
    module_loader.exec_module(module)
    return module


def _get_server_mapping(
    addon_dir: Path, addon_version: str
) -> List[Tuple[str, str]]:
    server_dir = addon_dir / "server"
    public_dir = addon_dir / "public"
    src_package_py = addon_dir / "package.py"
    pyproject_toml = addon_dir / "client" / "pyproject.toml"

    mapping: List[Tuple[str, str]] = [
        (src_path, f"server/{sub_path}")
        for src_path, sub_path in find_files_in_subdir(str(server_dir))
    ]
    mapping.extend([
        (src_path, f"public/{sub_path}")
        for src_path, sub_path in find_files_in_subdir(str(public_dir))
    ])
    mapping.append((src_package_py.as_posix(), "package.py"))
    if pyproject_toml.exists():
        mapping.append((pyproject_toml.as_posix(), "private/pyproject.toml"))

    return mapping


def create_addon_package(
    addon_dir: Path,
    output_dir: Path,
    create_zip: bool,
):
    src_package_py = addon_dir / "package.py"

    package = import_filepath(src_package_py)
    addon_name = package.name
    addon_version = package.version

    files_mapping = _get_server_mapping(addon_dir, addon_version)

    client_dir = getattr(package, "client_dir", None)
    client_zip_content = prepare_client_zip(
        addon_dir, addon_name, addon_version, client_dir
    )

    if create_zip:
        create_addon_zip(
            output_dir,
            addon_name,
            addon_version,
            files_mapping,
            client_zip_content
        )

    else:
        addon_output_dir = output_dir / addon_dir.name / addon_version
        if addon_output_dir.exists():
            shutil.rmtree(str(addon_output_dir))

        addon_output_dir.mkdir(parents=True, exist_ok=True)

        for src_path, dst_subpath in files_mapping:
            dst_path = addon_output_dir / dst_subpath
            dst_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(src_path, dst_path)

        if client_zip_content is not None:
            private_dir = addon_output_dir / "private"
            private_dir.mkdir(parents=True, exist_ok=True)
            with open(private_dir / "client.zip", "wb") as stream:
                stream.write(client_zip_content.read())


def main(
    output_dir=None,
    skip_zip=True,
    clear_output_dir=False,
    addons=None,
):
    current_dir = Path(os.path.dirname(os.path.abspath(__file__)))
    create_zip = not skip_zip

    if output_dir:
        output_dir = Path(output_dir)
    else:
        output_dir = current_dir / "packages"

    if output_dir.exists() and clear_output_dir:
        shutil.rmtree(str(output_dir))

    print("Package creation started...")
    print(f"Output directory: {output_dir}")

    # Make sure output dir is created
    output_dir.mkdir(parents=True, exist_ok=True)
    ignored_addons = set(IGNORED_HOSTS) | set(IGNORED_MODULES)
    for addon_dir in current_dir.iterdir():
        if not addon_dir.is_dir():
            continue

        if addons and addon_dir.name not in addons:
            continue

        if addon_dir.name in ignored_addons:
            continue

        server_dir = addon_dir / "server"
        if not server_dir.exists():
            continue

        create_addon_package(addon_dir, output_dir, create_zip)

        print(f"- package '{addon_dir.name}' created")
    print(f"Package creation finished. Output directory: {output_dir}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--skip-zip",
        dest="skip_zip",
        action="store_true",
        help=(
            "Skip zipping server package and create only"
            " server folder structure."
        )
    )
    parser.add_argument(
        "--keep-sources",
        dest="keep_sources",
        action="store_true",
        help=(
            "Keep folder structure when server package is created."
        )
    )
    parser.add_argument(
        "-o", "--output",
        dest="output_dir",
        default=None,
        help=(
            "Directory path where package will be created"
            " (Will be purged if already exists!)"
        )
    )
    parser.add_argument(
        "-c", "--clear-output-dir",
        dest="clear_output_dir",
        action="store_true",
        help=(
            "Clear output directory before package creation."
        )
    )
    parser.add_argument(
        "-a",
        "--addon",
        dest="addons",
        action="append",
        help="Limit addon creation to given addon name",
    )

    args = parser.parse_args(sys.argv[1:])
    if args.keep_sources:
        print("Keeping sources is not supported anymore!")

    main(
        args.output_dir,
        args.skip_zip,
        args.clear_output_dir,
        args.addons,
    )
@@ -1,8 +0,0 @@
from .addon import DeadlineAddon
from .version import __version__


__all__ = (
    "DeadlineAddon",
    "__version__"
)
@@ -1,617 +0,0 @@
# -*- coding: utf-8 -*-
"""Abstract package for submitting jobs to Deadline.

It provides Deadline JobInfo data class.

"""
import json.decoder
import os
from abc import abstractmethod
import platform
import getpass
from functools import partial
from collections import OrderedDict

import six
import attr
import requests

import pyblish.api
from ayon_core.pipeline.publish import (
    AbstractMetaInstancePlugin,
    KnownPublishError,
    AYONPyblishPluginMixin
)
from ayon_core.pipeline.publish.lib import (
    replace_with_published_scene_path
)

JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)


def requests_post(*args, **kwargs):
    """Wrap request post method.

    Disabling SSL certificate validation if ``verify`` kwarg is set to False.
    This is useful when Deadline server is
    running with self-signed certificates and its certificate is not
    added to trusted certificates on client machines.

    Warning:
        Disabling SSL certificate validation is defeating one line
        of defense SSL is providing, and it is not recommended.

    """
    auth = kwargs.get("auth")
    if auth:
        kwargs["auth"] = tuple(auth)  # explicit cast to tuple
    # add 10sec timeout before bailing out
    kwargs['timeout'] = 10
    return requests.post(*args, **kwargs)


def requests_get(*args, **kwargs):
    """Wrap request get method.

    Disabling SSL certificate validation if ``verify`` kwarg is set to False.
    This is useful when Deadline server is
    running with self-signed certificates and its certificate is not
    added to trusted certificates on client machines.

    Warning:
        Disabling SSL certificate validation is defeating one line
        of defense SSL is providing, and it is not recommended.

    """
    auth = kwargs.get("auth")
    if auth:
        kwargs["auth"] = tuple(auth)
    # add 10sec timeout before bailing out
    kwargs['timeout'] = 10
    return requests.get(*args, **kwargs)


class DeadlineKeyValueVar(dict):
    """

    Serializes dictionary key values as "{key}={value}" like Deadline uses
    for EnvironmentKeyValue.

    As an example:
        EnvironmentKeyValue0="A_KEY=VALUE_A"
        EnvironmentKeyValue1="OTHER_KEY=VALUE_B"

    The keys are serialized in alphabetical order (sorted).

    Example:
        >>> var = DeadlineKeyValueVar("EnvironmentKeyValue")
        >>> var["my_var"] = "hello"
        >>> var["my_other_var"] = "hello2"
        >>> var.serialize()
        {'EnvironmentKeyValue0': 'my_other_var=hello2', 'EnvironmentKeyValue1': 'my_var=hello'}

    """
    def __init__(self, key):
        super(DeadlineKeyValueVar, self).__init__()
        self.__key = key

    def serialize(self):
        key = self.__key

        # Allow custom location for index in serialized string
        if "{}" not in key:
            key = key + "{}"

        return {
            key.format(index): "{}={}".format(var_key, var_value)
            for index, (var_key, var_value) in enumerate(sorted(self.items()))
        }


class DeadlineIndexedVar(dict):
    """

    Allows to set and query values by integer indices:
        Query: var[1] or var.get(1)
        Set: var[1] = "my_value"
        Append: var += "value"

    Note: Iterating the instance is not guaranteed to be the order of the
    indices. To do so iterate with `sorted()`

    """
    def __init__(self, key):
        super(DeadlineIndexedVar, self).__init__()
        self.__key = key

    def serialize(self):
        key = self.__key

        # Allow custom location for index in serialized string
        if "{}" not in key:
            key = key + "{}"

        return {
            key.format(index): value for index, value in sorted(self.items())
        }

    def next_available_index(self):
        # Add as first unused entry
        i = 0
        while i in self.keys():
            i += 1
        return i

    def update(self, data):
        # Force the integer key check
        for key, value in data.items():
            self.__setitem__(key, value)

    def __iadd__(self, other):
        index = self.next_available_index()
        self[index] = other
        return self

    def __setitem__(self, key, value):
        if not isinstance(key, int):
            raise TypeError("Key must be an integer: {}".format(key))

        if key < 0:
            raise ValueError("Negative index can't be set: {}".format(key))
        dict.__setitem__(self, key, value)


@attr.s
class DeadlineJobInfo(object):
    """Mapping of all Deadline *JobInfo* attributes.

    This contains all JobInfo attributes plus their default values.
    Those attributes set to `None` shouldn't be posted to Deadline as
    the only required one is `Plugin`. Their default values used by Deadline
    are stated in comments.

    ..seealso:
        https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/manual-submission.html

    """

    # Required
    # ----------------------------------------------
    Plugin = attr.ib()

    # General
    Frames = attr.ib(default=None)  # default: 0
    Name = attr.ib(default="Untitled")
    Comment = attr.ib(default=None)  # default: empty
    Department = attr.ib(default=None)  # default: empty
    BatchName = attr.ib(default=None)  # default: empty
    UserName = attr.ib(default=getpass.getuser())
    MachineName = attr.ib(default=platform.node())
    Pool = attr.ib(default=None)  # default: "none"
    SecondaryPool = attr.ib(default=None)
    Group = attr.ib(default=None)  # default: "none"
    Priority = attr.ib(default=50)
    ChunkSize = attr.ib(default=1)
    ConcurrentTasks = attr.ib(default=1)
    LimitConcurrentTasksToNumberOfCpus = attr.ib(
        default=None)  # default: "true"
    OnJobComplete = attr.ib(default="Nothing")
    SynchronizeAllAuxiliaryFiles = attr.ib(default=None)  # default: false
    ForceReloadPlugin = attr.ib(default=None)  # default: false
    Sequential = attr.ib(default=None)  # default: false
    SuppressEvents = attr.ib(default=None)  # default: false
    Protected = attr.ib(default=None)  # default: false
    InitialStatus = attr.ib(default="Active")
    NetworkRoot = attr.ib(default=None)

    # Timeouts
    # ----------------------------------------------
    MinRenderTimeSeconds = attr.ib(default=None)  # Default: 0
    MinRenderTimeMinutes = attr.ib(default=None)  # Default: 0
    TaskTimeoutSeconds = attr.ib(default=None)  # Default: 0
    TaskTimeoutMinutes = attr.ib(default=None)  # Default: 0
    StartJobTimeoutSeconds = attr.ib(default=None)  # Default: 0
    StartJobTimeoutMinutes = attr.ib(default=None)  # Default: 0
    InitializePluginTimeoutSeconds = attr.ib(default=None)  # Default: 0
    # can be one of <Error/Notify/ErrorAndNotify/Complete>
    OnTaskTimeout = attr.ib(default=None)  # Default: Error
    EnableTimeoutsForScriptTasks = attr.ib(default=None)  # Default: false
    EnableFrameTimeouts = attr.ib(default=None)  # Default: false
    EnableAutoTimeout = attr.ib(default=None)  # Default: false

    # Interruptible
    # ----------------------------------------------
    Interruptible = attr.ib(default=None)  # Default: false
    InterruptiblePercentage = attr.ib(default=None)
    RemTimeThreshold = attr.ib(default=None)

    # Notifications
    # ----------------------------------------------
    # can be comma separated list of users
    NotificationTargets = attr.ib(default=None)  # Default: blank
    ClearNotificationTargets = attr.ib(default=None)  # Default: false
    # A comma separated list of additional email addresses
    NotificationEmails = attr.ib(default=None)  # Default: blank
    OverrideNotificationMethod = attr.ib(default=None)  # Default: false
    EmailNotification = attr.ib(default=None)  # Default: false
    PopupNotification = attr.ib(default=None)  # Default: false
    # String with `[EOL]` used for end of line
    NotificationNote = attr.ib(default=None)  # Default: blank

    # Machine Limit
    # ----------------------------------------------
    MachineLimit = attr.ib(default=None)  # Default: 0
    MachineLimitProgress = attr.ib(default=None)  # Default: -1.0
    Whitelist = attr.ib(default=None)  # Default: blank
    Blacklist = attr.ib(default=None)  # Default: blank

    # Limits
    # ----------------------------------------------
    # comma separated list of limit groups
    LimitGroups = attr.ib(default=None)  # Default: blank

    # Dependencies
    # ----------------------------------------------
    # comma separated list of job IDs
    JobDependencies = attr.ib(default=None)  # Default: blank
    JobDependencyPercentage = attr.ib(default=None)  # Default: -1
    IsFrameDependent = attr.ib(default=None)  # Default: false
    FrameDependencyOffsetStart = attr.ib(default=None)  # Default: 0
    FrameDependencyOffsetEnd = attr.ib(default=None)  # Default: 0
    ResumeOnCompleteDependencies = attr.ib(default=None)  # Default: true
    ResumeOnDeletedDependencies = attr.ib(default=None)  # Default: false
    ResumeOnFailedDependencies = attr.ib(default=None)  # Default: false
    # comma separated list of asset paths
    RequiredAssets = attr.ib(default=None)  # Default: blank
    # comma separated list of script paths
    ScriptDependencies = attr.ib(default=None)  # Default: blank

    # Failure Detection
    # ----------------------------------------------
    OverrideJobFailureDetection = attr.ib(default=None)  # Default: false
    FailureDetectionJobErrors = attr.ib(default=None)  # 0..x
    OverrideTaskFailureDetection = attr.ib(default=None)  # Default: false
    FailureDetectionTaskErrors = attr.ib(default=None)  # 0..x
    IgnoreBadJobDetection = attr.ib(default=None)  # Default: false
    SendJobErrorWarning = attr.ib(default=None)  # Default: false

    # Cleanup
    # ----------------------------------------------
    DeleteOnComplete = attr.ib(default=None)  # Default: false
    ArchiveOnComplete = attr.ib(default=None)  # Default: false
    OverrideAutoJobCleanup = attr.ib(default=None)  # Default: false
    OverrideJobCleanup = attr.ib(default=None)
    JobCleanupDays = attr.ib(default=None)  # Default: false
    # <ArchiveJobs/DeleteJobs>
    OverrideJobCleanupType = attr.ib(default=None)

    # Scheduling
    # ----------------------------------------------
    # <None/Once/Daily/Custom>
    ScheduledType = attr.ib(default=None)  # Default: None
    # <dd/MM/yyyy HH:mm>
    ScheduledStartDateTime = attr.ib(default=None)
    ScheduledDays = attr.ib(default=None)  # Default: 1
    # <dd:hh:mm:ss>
    JobDelay = attr.ib(default=None)
    # <Day of the Week><Start/Stop>Time=<HH:mm:ss>
    Scheduled = attr.ib(default=None)

    # Scripts
    # ----------------------------------------------
    # all accept path to script
    PreJobScript = attr.ib(default=None)  # Default: blank
    PostJobScript = attr.ib(default=None)  # Default: blank
    PreTaskScript = attr.ib(default=None)  # Default: blank
    PostTaskScript = attr.ib(default=None)  # Default: blank

    # Event Opt-Ins
    # ----------------------------------------------
    # comma separated list of plugins
    EventOptIns = attr.ib(default=None)  # Default: blank

    # Environment
    # ----------------------------------------------
    EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar,
                                                  "EnvironmentKeyValue"))

    IncludeEnvironment = attr.ib(default=None)  # Default: false
    UseJobEnvironmentOnly = attr.ib(default=None)  # Default: false
    CustomPluginDirectory = attr.ib(default=None)  # Default: blank

    # Job Extra Info
    # ----------------------------------------------
    ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo"))
    ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar,
                                                "ExtraInfoKeyValue"))

    # Task Extra Info Names
    # ----------------------------------------------
    OverrideTaskExtraInfoNames = attr.ib(default=None)  # Default: false
    TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar,
                                                "TaskExtraInfoName"))

    # Output
    # ----------------------------------------------
    OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar,
                                             "OutputFilename"))
    OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar,
                                                 "OutputFilename{}Tile"))
    OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar,
                                              "OutputDirectory"))

    # Asset Dependency
    # ----------------------------------------------
    AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar,
                                              "AssetDependency"))

    # Tile Job
    # ----------------------------------------------
    TileJob = attr.ib(default=None)  # Default: false
    TileJobFrame = attr.ib(default=None)  # Default: 0
    TileJobTilesInX = attr.ib(default=None)  # Default: 0
    TileJobTilesInY = attr.ib(default=None)  # Default: 0
    TileJobTileCount = attr.ib(default=None)  # Default: 0

    # Maintenance Job
    # ----------------------------------------------
    MaintenanceJob = attr.ib(default=None)  # Default: false
    MaintenanceJobStartFrame = attr.ib(default=None)  # Default: 0
    MaintenanceJobEndFrame = attr.ib(default=None)  # Default: 0

    def serialize(self):
        """Return all data serialized as dictionary.

        Returns:
            OrderedDict: all serialized data.

        """
        def filter_data(a, v):
            if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)):
                return False
            if v is None:
                return False
            return True

        serialized = attr.asdict(
            self, dict_factory=OrderedDict, filter=filter_data)

        # Custom serialize these attributes
        for attribute in [
            self.EnvironmentKeyValue,
            self.ExtraInfo,
            self.ExtraInfoKeyValue,
            self.TaskExtraInfoName,
            self.OutputFilename,
            self.OutputFilenameTile,
            self.OutputDirectory,
            self.AssetDependency
        ]:
            serialized.update(attribute.serialize())

        return serialized

    def update(self, data):
        """Update instance with data dict"""
        for key, value in data.items():
            setattr(self, key, value)

    def add_render_job_env_var(self):
        """Check if in OP or AYON mode and use appropriate env var."""
        self.EnvironmentKeyValue["AYON_RENDER_JOB"] = "1"
        self.EnvironmentKeyValue["AYON_BUNDLE_NAME"] = (
            os.environ["AYON_BUNDLE_NAME"])


@six.add_metaclass(AbstractMetaInstancePlugin)
class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
                             AYONPyblishPluginMixin):
    """Class abstracting access to Deadline."""

    label = "Submit to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1

    import_reference = False
    use_published = True
    asset_dependencies = False
    default_priority = 50

    def __init__(self, *args, **kwargs):
        super(AbstractSubmitDeadline, self).__init__(*args, **kwargs)
        self._instance = None
        self._deadline_url = None
        self.scene_path = None
        self.job_info = None
        self.plugin_info = None
        self.aux_files = None

    def process(self, instance):
        """Plugin entry point."""
        self._instance = instance
        context = instance.context
        self._deadline_url = instance.data["deadline"]["url"]

        assert self._deadline_url, "Requires Deadline Webservice URL"

        file_path = None
        if self.use_published:
            if not self.import_reference:
                file_path = self.from_published_scene()
            else:
                self.log.info("use the scene with imported reference for rendering")  # noqa
                file_path = context.data["currentFile"]

            # fallback if nothing was set
            if not file_path:
                self.log.warning("Falling back to workfile")
                file_path = context.data["currentFile"]

        self.scene_path = file_path
        self.log.info("Using {} for render/export.".format(file_path))

        self.job_info = self.get_job_info()
        self.plugin_info = self.get_plugin_info()
        self.aux_files = self.get_aux_files()

        job_id = self.process_submission()
        self.log.info("Submitted job to Deadline: {}.".format(job_id))

        # TODO: Find a way that's more generic and not render type specific
        if instance.data.get("splitRender"):
            self.log.info("Splitting export and render in two jobs")
            self.log.info("Export job id: %s", job_id)
            render_job_info = self.get_job_info(dependency_job_ids=[job_id])
            render_plugin_info = self.get_plugin_info(job_type="render")
            payload = self.assemble_payload(
                job_info=render_job_info,
                plugin_info=render_plugin_info
            )
            auth = instance.data["deadline"]["auth"]
            verify = instance.data["deadline"]["verify"]
            render_job_id = self.submit(payload, auth, verify)
            self.log.info("Render job id: %s", render_job_id)

    def process_submission(self):
        """Process data for submission.

        This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload
        from them and submits it to Deadline.

        Returns:
            str: Deadline job ID

        """
        payload = self.assemble_payload()
        auth = self._instance.data["deadline"]["auth"]
        verify = self._instance.data["deadline"]["verify"]
        return self.submit(payload, auth, verify)

    @abstractmethod
    def get_job_info(self):
        """Return filled Deadline JobInfo.

        This is host/plugin specific implementation of how to fill data in.

        See:
            :class:`DeadlineJobInfo`

        Returns:
            :class:`DeadlineJobInfo`: Filled Deadline JobInfo.

        """
        pass

    @abstractmethod
    def get_plugin_info(self):
        """Return filled Deadline PluginInfo.

        This is host/plugin specific implementation of how to fill data in.

        See:
            :class:`DeadlineJobInfo`

        Returns:
            dict: Filled Deadline PluginInfo.

        """
        pass

    def get_aux_files(self):
        """Return list of auxiliary files for Deadline job.

        If needed this should be overridden, otherwise return empty list as
        that field even empty must be present on Deadline submission.

        Returns:
            list: List of files.

        """
        return []

    def from_published_scene(self, replace_in_path=True):
        """Switch work scene for published scene.

        If rendering/exporting from published scenes is enabled, this will
        replace paths from working scene to published scene.

        Args:
            replace_in_path (bool): if True, it will try to find
                old scene name in path of expected files and replace it
                with name of published scene.

        Returns:
            str: Published scene path.
            None: if no published scene is found.

        Note:
            Published scene path is actually determined from project Anatomy
            as at the time this plugin is running the scene may still not be
            published.

        """
        return replace_with_published_scene_path(
            self._instance, replace_in_path=replace_in_path)

    def assemble_payload(
            self, job_info=None, plugin_info=None, aux_files=None):
        """Assemble payload data from its various parts.

        Args:
            job_info (DeadlineJobInfo): Deadline JobInfo. You can use
                :class:`DeadlineJobInfo` for it.
            plugin_info (dict): Deadline PluginInfo. Plugin specific options.
            aux_files (list, optional): List of auxiliary file to submit with
                the job.

        Returns:
            dict: Deadline Payload.

        """
        job = job_info or self.job_info
        return {
            "JobInfo": job.serialize(),
            "PluginInfo": plugin_info or self.plugin_info,
            "AuxFiles": aux_files or self.aux_files
        }

    def submit(self, payload, auth, verify):
        """Submit payload to Deadline API end-point.

        This takes payload in the form of JSON file and POSTs it to
        the Deadline jobs end-point.

        Args:
            payload (dict): dict to become json in deadline submission.
            auth (tuple): (username, password)
            verify (bool): verify SSL certificate if present

        Returns:
            str: resulting Deadline job id.

        Throws:
            KnownPublishError: if submission fails.

        """
        url = "{}/api/jobs".format(self._deadline_url)
        response = requests_post(
            url, json=payload, auth=auth, verify=verify)
        if not response.ok:
            self.log.error("Submission failed!")
            self.log.error(response.status_code)
            self.log.error(response.content)
            self.log.debug(payload)
            raise KnownPublishError(response.text)

        try:
            result = response.json()
        except JSONDecodeError:
            msg = "Broken response {}. ".format(response)
            msg += "Try restarting the Deadline Webservice."
            self.log.warning(msg, exc_info=True)
            raise KnownPublishError("Broken response from DL")

        # for submit publish job
        self._instance.data["deadlineSubmissionJob"] = result

        return result["_id"]
@ -1,81 +0,0 @@
|
|||
import os
import sys

import requests
import six

from ayon_core.lib import Logger
from ayon_core.addon import AYONAddon, IPluginPaths

from .version import __version__


class DeadlineWebserviceError(Exception):
    """Exception to throw when connection to Deadline server fails."""


class DeadlineAddon(AYONAddon, IPluginPaths):
    name = "deadline"
    version = __version__

    def initialize(self, studio_settings):
        deadline_settings = studio_settings[self.name]
        deadline_servers_info = {
            url_item["name"]: url_item
            for url_item in deadline_settings["deadline_urls"]
        }

        if not deadline_servers_info:
            self.enabled = False
            self.log.warning((
                "Deadline Webservice URLs are not specified. Disabling addon."
            ))

        self.deadline_servers_info = deadline_servers_info

    def get_plugin_paths(self):
        """Deadline plugin paths."""
        current_dir = os.path.dirname(os.path.abspath(__file__))
        return {
            "publish": [os.path.join(current_dir, "plugins", "publish")]
        }

    @staticmethod
    def get_deadline_pools(webservice, auth=None, log=None):
        """Get pools from Deadline.

        Args:
            webservice (str): Server url.
            auth (Optional[Tuple[str, str]]): Tuple containing username,
                password.
            log (Optional[Logger]): Logger to log errors to, if provided.

        Returns:
            List[str]: Pools.

        Throws:
            RuntimeError: If Deadline webservice is unreachable.

        """
        from .abstract_submit_deadline import requests_get

        if not log:
            log = Logger.get_logger(__name__)

        argument = "{}/api/pools?NamesOnly=true".format(webservice)
        try:
            kwargs = {}
            if auth:
                kwargs["auth"] = auth
            response = requests_get(argument, **kwargs)
        except requests.exceptions.ConnectionError as exc:
            msg = 'Cannot connect to DL web service {}'.format(webservice)
            log.error(msg)
            six.reraise(
                DeadlineWebserviceError,
                DeadlineWebserviceError('{} - {}'.format(msg, exc)),
                sys.exc_info()[2])
        if not response.ok:
            log.warning("No pools retrieved")
            return []

        return response.json()
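A hedged sketch of querying pools through the static helper above; the URL
and credentials are placeholders:

    pools = DeadlineAddon.get_deadline_pools(
        "http://localhost:8082",
        auth=("artist", "secret"),
    )
    # e.g. ["none", "comp", "lighting"], straight from /api/pools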
@@ -1,10 +0,0 @@
# Describes the list of product types used for plugin filtering for farm
# publishing.
FARM_FAMILIES = [
    "render", "render.farm", "render.frames_farm",
    "prerender", "prerender.farm", "prerender.frames_farm",
    "renderlayer", "imagesequence", "image",
    "vrayscene", "maxrender",
    "arnold_rop", "mantra_rop",
    "karma_rop", "vray_rop", "redshift_rop",
    "renderFarm", "usdrender", "publish.hou"
]
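For context, pyblish only runs an InstancePlugin when the instance's family
data intersects the plugin's `families` list, which is how this constant
gates every farm plugin below (the class name is illustrative):

    import pyblish.api
    from ayon_deadline.lib import FARM_FAMILIES

    class SomeFarmCollector(pyblish.api.InstancePlugin):
        families = FARM_FAMILIES  # only picked up for farm product types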
@@ -1,115 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect Deadline servers from instance.

This resolves the index into the server list stored in the
`deadlineServers` instance attribute, or uses the default server if that
attribute doesn't exist.

"""
import pyblish.api
from ayon_core.pipeline.publish import KnownPublishError

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
    """Collect Deadline Webservice URL from instance."""

    # Run before collect_render.
    order = pyblish.api.CollectorOrder + 0.225
    label = "Deadline Webservice from the Instance"
    targets = ["local"]

    families = FARM_FAMILIES

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Should not be processed on farm, skipping.")
            return

        if not instance.data.get("deadline"):
            instance.data["deadline"] = {}

        # TODO: the host-specific logic should be removed; all hosts should
        #   behave the same
        host_name = instance.context.data["hostName"]
        if host_name == "maya":
            deadline_url = self._collect_deadline_url(instance)
        else:
            deadline_url = (instance.data.get("deadlineUrl") or  # backwards
                            instance.data.get("deadline", {}).get("url"))
        if deadline_url:
            instance.data["deadline"]["url"] = deadline_url.strip().rstrip("/")
        else:
            instance.data["deadline"]["url"] = instance.context.data["deadline"]["defaultUrl"]  # noqa
        self.log.debug(
            "Using {} for submission".format(instance.data["deadline"]["url"]))

    def _collect_deadline_url(self, render_instance):
        # type: (pyblish.api.Instance) -> str
        """Get Deadline Webservice URL from render instance.

        This will get all configured Deadline Webservice URLs and create a
        subset of them based upon the project configuration. It will then
        take `deadlineServers` from the render instance, which is basically
        an `int` index into that list.

        Args:
            render_instance (pyblish.api.Instance): Render instance created
                by Creator in Maya.

        Returns:
            str: Selected Deadline Webservice URL.

        """
        # Not all hosts can import this module.
        from maya import cmds

        deadline_settings = (
            render_instance.context.data
            ["project_settings"]
            ["deadline"]
        )
        default_server_url = (render_instance.context.data["deadline"]
                                                          ["defaultUrl"])
        # QUESTION How and where is this set? Should it be removed?
        instance_server = render_instance.data.get("deadlineServers")
        if not instance_server:
            self.log.debug("Using default server.")
            return default_server_url

        # Get instance server as string.
        if isinstance(instance_server, int):
            instance_server = cmds.getAttr(
                "{}.deadlineServers".format(render_instance.data["objset"]),
                asString=True
            )

        default_servers = {
            url_item["name"]: url_item["value"]
            for url_item in deadline_settings["deadline_servers_info"]
        }
        project_servers = (
            render_instance.context.data
            ["project_settings"]
            ["deadline"]
            ["deadline_servers"]
        )
        if not project_servers:
            self.log.debug("No project servers found. Using default servers.")
            return default_servers[instance_server]

        project_enabled_servers = {
            k: default_servers[k]
            for k in project_servers
            if k in default_servers
        }

        if instance_server not in project_enabled_servers:
            msg = (
                "\"{}\" server on instance is not enabled in project settings."
                " Enabled project servers:\n{}".format(
                    instance_server, project_enabled_servers
                )
            )
            raise KnownPublishError(msg)

        self.log.debug("Using project approved server.")
        return project_enabled_servers[instance_server]
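A rough illustration of the fallback order implemented in process(); the
URLs are made up:

    data = {"deadlineUrl": "http://farm-a:8082/"}   # legacy per-instance key
    url = data.get("deadlineUrl") or data.get("deadline", {}).get("url")
    print(url.strip().rstrip("/"))                  # http://farm-a:8082
    # with neither key set, context.data["deadline"]["defaultUrl"] is used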
@@ -1,48 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect default Deadline server."""
import pyblish.api


class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
    """Collect default Deadline Webservice URL.

    DL webservice addresses must be configured first in System Settings for
    the project settings enum to work.

    The default webservice can be overridden by
    `project_settings/deadline/deadline_servers`. Currently only a single
    url is expected.

    This url can be overridden by some hosts directly on instances with
    `CollectDeadlineServerFromInstance`.
    """

    # Run before collect_deadline_server_instance.
    order = pyblish.api.CollectorOrder + 0.200
    label = "Default Deadline Webservice"
    targets = ["local"]

    def process(self, context):
        try:
            deadline_addon = context.data["ayonAddonsManager"]["deadline"]
        except AttributeError:
            self.log.error("Cannot get AYON Deadline addon.")
            raise AssertionError("AYON Deadline addon not found.")

        deadline_settings = context.data["project_settings"]["deadline"]
        deadline_server_name = deadline_settings["deadline_server"]

        dl_server_info = None
        if deadline_server_name:
            dl_server_info = deadline_addon.deadline_servers_info.get(
                deadline_server_name)

        if dl_server_info:
            deadline_url = dl_server_info["value"]
        else:
            default_dl_server_info = deadline_addon.deadline_servers_info[0]
            deadline_url = default_dl_server_info["value"]

        context.data["deadline"] = {}
        context.data["deadline"]["defaultUrl"] = (
            deadline_url.strip().rstrip("/"))
@@ -1,91 +0,0 @@
# -*- coding: utf-8 -*-
import pyblish.api

from ayon_core.lib import TextDef
from ayon_core.pipeline.publish import AYONPyblishPluginMixin

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlinePools(pyblish.api.InstancePlugin,
                           AYONPyblishPluginMixin):
    """Collect pools from instance or Publisher attributes, from Settings
    otherwise.

    Pools are used to control which DL workers can render the job.

    Pools might be set:
    - directly on the instance (set directly in DCC)
    - from Publisher attributes
    - from defaults in Settings.

    Publisher attributes could be shown even for instances that should be
    rendered locally, as visibility is driven by the product type of the
    instance (which will most likely be `render`).
    (Might be resolved in the future; the class attribute 'families' should
    be cleaned up.)

    """

    order = pyblish.api.CollectorOrder + 0.420
    label = "Collect Deadline Pools"
    hosts = [
        "aftereffects",
        "fusion",
        "harmony",
        "maya",
        "max",
        "houdini",
        "nuke",
    ]

    families = FARM_FAMILIES

    primary_pool = None
    secondary_pool = None

    @classmethod
    def apply_settings(cls, project_settings):
        # deadline.publish.CollectDeadlinePools
        settings = project_settings["deadline"]["publish"]["CollectDeadlinePools"]  # noqa
        cls.primary_pool = settings.get("primary_pool", None)
        cls.secondary_pool = settings.get("secondary_pool", None)

    def process(self, instance):
        attr_values = self.get_attr_values_from_data(instance.data)
        if not instance.data.get("primaryPool"):
            instance.data["primaryPool"] = (
                attr_values.get("primaryPool") or self.primary_pool or "none"
            )
        if instance.data["primaryPool"] == "-":
            instance.data["primaryPool"] = None

        if not instance.data.get("secondaryPool"):
            instance.data["secondaryPool"] = (
                attr_values.get("secondaryPool") or self.secondary_pool or "none"  # noqa
            )

        if instance.data["secondaryPool"] == "-":
            instance.data["secondaryPool"] = None

    @classmethod
    def get_attribute_defs(cls):
        # TODO: Preferably this would be an enum for the user
        # but the Deadline server URL can be dynamic and
        # can be set per render instance. Since get_attribute_defs
        # can't be dynamic unfortunately EnumDef isn't possible (yet?)
        # pool_names = self.deadline_addon.get_deadline_pools(deadline_url,
        #                                                     self.log)
        # secondary_pool_names = ["-"] + pool_names

        return [
            TextDef("primaryPool",
                    label="Primary Pool",
                    default=cls.primary_pool,
                    tooltip="Deadline primary pool, "
                            "applicable for farm rendering"),
            TextDef("secondaryPool",
                    label="Secondary Pool",
                    default=cls.secondary_pool,
                    tooltip="Deadline secondary pool, "
                            "applicable for farm rendering")
        ]
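The precedence in process() distilled into a standalone sketch; the helper
name is invented:

    def _resolve_pool(instance_value, attr_value, settings_default):
        # instance -> Publisher attribute -> Settings default -> "none";
        # "-" is the sentinel for an explicitly unset pool
        value = instance_value or attr_value or settings_default or "none"
        return None if value == "-" else value

    _resolve_pool(None, "comp", "renderfarm")   # -> "comp"
    _resolve_pool("-", None, None)              # -> None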
@@ -1,83 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect user credentials.

Requires:
    context -> project_settings
    instance.data["deadline"]["url"]

Provides:
    instance.data["deadline"] -> require_authentication (bool)
    instance.data["deadline"] -> auth (tuple (str, str)) -
        (username, password) or None
"""
import pyblish.api

from ayon_api import get_server_api_connection

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):
    """Collects user name and password for artist if DL requires
    authentication.
    """
    order = pyblish.api.CollectorOrder + 0.250
    label = "Collect Deadline User Credentials"

    targets = ["local"]
    hosts = ["aftereffects",
             "blender",
             "fusion",
             "harmony",
             "nuke",
             "maya",
             "max",
             "houdini"]

    families = FARM_FAMILIES

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Should not be processed on farm, skipping.")
            return

        collected_deadline_url = instance.data["deadline"]["url"]
        if not collected_deadline_url:
            raise ValueError("Instance doesn't have '[deadline][url]'.")
        context_data = instance.context.data
        deadline_settings = context_data["project_settings"]["deadline"]

        deadline_server_name = None
        # deadline url might be set directly from instance, need to find
        # metadata for it
        for deadline_info in deadline_settings["deadline_urls"]:
            dl_settings_url = deadline_info["value"].strip().rstrip("/")
            if dl_settings_url == collected_deadline_url:
                deadline_server_name = deadline_info["name"]
                break

        if not deadline_server_name:
            raise ValueError(f"Collected {collected_deadline_url} doesn't "
                             "match any site configured in Studio Settings")

        instance.data["deadline"]["require_authentication"] = (
            deadline_info["require_authentication"]
        )
        instance.data["deadline"]["auth"] = None

        instance.data["deadline"]["verify"] = (
            not deadline_info["not_verify_ssl"])

        if not deadline_info["require_authentication"]:
            return

        addons_manager = instance.context.data["ayonAddonsManager"]
        deadline_addon = addons_manager["deadline"]
        # TODO import 'get_addon_site_settings' when available
        #   in public 'ayon_api'
        local_settings = get_server_api_connection().get_addon_site_settings(
            deadline_addon.name, deadline_addon.version)
        local_settings = local_settings["local_settings"]
        for server_info in local_settings:
            if deadline_server_name == server_info["server_name"]:
                instance.data["deadline"]["auth"] = (server_info["username"],
                                                     server_info["password"])
@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Deadline Authentication</title>
        <description>
## Deadline authentication is required

This project's Settings specify that Deadline requires authentication.

### How to repair?

Please go to AYON Server > Site Settings and provide your Deadline username and password.
In some cases the password may be empty if Deadline is configured to allow that. Ask your administrator.

        </description>
    </error>
</root>
@@ -1,31 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Deadline Pools</title>
        <description>
## Invalid Deadline pools found

Configured pools don't match the pools available in Deadline.

### How to repair?

If your instance had Deadline pools set on creation, remove or
change them.

In other cases, ask an admin to change them in Settings.

Available Deadline pools:

{pools_str}

        </description>
        <detail>
### __Detailed Info__

This error is shown when a configured pool is not available on Deadline. It
can happen when publishing old workfiles which were created with previous
Deadline pools, or when someone changed the available pools in Deadline
but didn't modify AYON Settings to match the changes.
        </detail>
    </error>
</root>
@@ -1,143 +0,0 @@
import os
import attr
import getpass
import pyblish.api
from datetime import datetime

from ayon_core.lib import (
    env_value_to_bool,
    collect_frames,
    is_in_tests,
)
from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class DeadlinePluginInfo():
    Comp = attr.ib(default=None)
    SceneFile = attr.ib(default=None)
    OutputFilePath = attr.ib(default=None)
    Output = attr.ib(default=None)
    StartupDirectory = attr.ib(default=None)
    Arguments = attr.ib(default=None)
    ProjectPath = attr.ib(default=None)
    AWSAssetFile0 = attr.ib(default=None)
    Version = attr.ib(default=None)
    MultiProcess = attr.ib(default=None)


class AfterEffectsSubmitDeadline(
    abstract_submit_deadline.AbstractSubmitDeadline
):

    label = "Submit AE to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["aftereffects"]
    families = ["render.farm"]  # cannot be "render" as that is integrated
    use_published = True
    targets = ["local"]

    priority = 50
    chunk_size = 1000000
    group = None
    department = None
    multiprocess = True

    def get_job_info(self):
        dln_job_info = DeadlineJobInfo(Plugin="AfterEffects")

        context = self._instance.context

        batch_name = os.path.basename(self._instance.data["source"])
        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
        dln_job_info.Name = self._instance.data["name"]
        dln_job_info.BatchName = batch_name
        dln_job_info.Plugin = "AfterEffects"
        dln_job_info.UserName = context.data.get(
            "deadlineUser", getpass.getuser())
        # Deadline requires integers in frame range
        frame_range = "{}-{}".format(
            int(round(self._instance.data["frameStart"])),
            int(round(self._instance.data["frameEnd"])))
        dln_job_info.Frames = frame_range

        dln_job_info.Priority = self.priority
        dln_job_info.Pool = self._instance.data.get("primaryPool")
        dln_job_info.SecondaryPool = self._instance.data.get("secondaryPool")
        dln_job_info.Group = self.group
        dln_job_info.Department = self.department
        dln_job_info.ChunkSize = self.chunk_size
        dln_job_info.OutputFilename += \
            os.path.basename(self._instance.data["expectedFiles"][0])
        dln_job_info.OutputDirectory += \
            os.path.dirname(self._instance.data["expectedFiles"][0])
        dln_job_info.JobDelay = "00:00:00"

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
            "AYON_IN_TESTS"
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }
        for key in keys:
            value = environment.get(key)
            if value:
                dln_job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs
        dln_job_info.add_render_job_env_var()

        return dln_job_info

    def get_plugin_info(self):
        deadline_plugin_info = DeadlinePluginInfo()

        render_path = self._instance.data["expectedFiles"][0]

        file_name, frame = list(collect_frames([render_path]).items())[0]
        if frame:
            # replace frame ('000001') with Deadline's required '[#######]';
            # expects filename in format
            # project_folder_product_version.FRAME.ext
            render_dir = os.path.dirname(render_path)
            file_name = os.path.basename(render_path)
            hashed = '[{}]'.format(len(frame) * "#")
            file_name = file_name.replace(frame, hashed)
            render_path = os.path.join(render_dir, file_name)

        deadline_plugin_info.Comp = self._instance.data["comp_name"]
        deadline_plugin_info.Version = self._instance.data["app_version"]
        # must be here because of DL AE plugin
        # added override of multiprocess by env var; if it shouldn't be used
        # for some app variant, use MULTIPROCESS:false in Settings
        # (default is True)
        env_multi = env_value_to_bool("MULTIPROCESS", default=True)
        deadline_plugin_info.MultiProcess = env_multi and self.multiprocess
        deadline_plugin_info.SceneFile = self.scene_path
        deadline_plugin_info.Output = render_path.replace("\\", "/")

        return attr.asdict(deadline_plugin_info)

    def from_published_scene(self):
        """Do not overwrite expected files.

        `use_published` is set to True, so rendering will be triggered
        from the published scene (in the 'publish' folder). The default
        implementation of the abstract class renames expected (eg. rendered)
        files accordingly, which is not needed here.
        """
        return super().from_published_scene(False)
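A small sketch of the frame-to-hash substitution done in get_plugin_info();
the file name is illustrative:

    render_path = "shot010_compMain_v001.000001.png"
    frame = "000001"                          # as found by collect_frames()
    hashed = "[{}]".format(len(frame) * "#")  # "[######]"
    print(render_path.replace(frame, hashed))
    # shot010_compMain_v001.[######].png  <- Deadline's expected padding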
@@ -1,225 +0,0 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline."""

import os
import getpass
import attr
from datetime import datetime

from ayon_core.lib import (
    BoolDef,
    NumberDef,
    TextDef,
    is_in_tests,
)
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
from ayon_core.pipeline.farm.tools import iter_expected_files

from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class BlenderPluginInfo():
    SceneFile = attr.ib(default=None)  # Input
    Version = attr.ib(default=None)  # Mandatory for Deadline
    SaveFile = attr.ib(default=True)


class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
                            AYONPyblishPluginMixin):
    label = "Submit Render to Deadline"
    hosts = ["blender"]
    families = ["render"]
    settings_category = "deadline"

    use_published = True
    priority = 50
    chunk_size = 1
    jobInfo = {}
    pluginInfo = {}
    group = None
    job_delay = "00:00:00:00"

    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="Blender")

        job_info.update(self.jobInfo)

        instance = self._instance
        context = instance.context

        # Always use the original work file name for the Job name even when
        # rendering is done from the published Work File. The original work
        # file name is clearer because it can also have subversion strings,
        # etc. which are stripped for the published file.
        src_filepath = context.data["currentFile"]
        src_filename = os.path.basename(src_filepath)

        if is_in_tests():
            src_filename += datetime.now().strftime("%d%m%Y%H%M%S")

        job_info.Name = f"{src_filename} - {instance.name}"
        job_info.BatchName = src_filename
        instance.data.get("blenderRenderPlugin", "Blender")
        job_info.UserName = context.data.get("deadlineUser", getpass.getuser())

        # Deadline requires integers in frame range
        frames = "{start}-{end}x{step}".format(
            start=int(instance.data["frameStartHandle"]),
            end=int(instance.data["frameEndHandle"]),
            step=int(instance.data["byFrameStep"]),
        )
        job_info.Frames = frames

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")
        job_info.Comment = instance.data.get("comment")

        if self.group != "none" and self.group:
            job_info.Group = self.group

        attr_values = self.get_attr_values_from_data(instance.data)
        render_globals = instance.data.setdefault("renderGlobals", {})
        machine_list = attr_values.get("machineList", "")
        if machine_list:
            if attr_values.get("whitelist", True):
                machine_list_key = "Whitelist"
            else:
                machine_list_key = "Blacklist"
            render_globals[machine_list_key] = machine_list

        job_info.ChunkSize = attr_values.get("chunkSize", self.chunk_size)
        job_info.Priority = attr_values.get("priority", self.priority)
        job_info.ScheduledType = "Once"
        job_info.JobDelay = attr_values.get("job_delay", self.job_delay)

        # Add options from RenderGlobals
        render_globals = instance.data.get("renderGlobals", {})
        job_info.update(render_globals)

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_IN_TESTS"
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if not value:
                continue
            job_info.EnvironmentKeyValue[key] = value

        # to recognize job from PYPE for turning Event On/Off
        job_info.add_render_job_env_var()
        job_info.EnvironmentKeyValue["AYON_LOG_NO_COLORS"] = "1"

        # Adding file dependencies.
        if self.asset_dependencies:
            dependencies = instance.context.data["fileDependencies"]
            for dependency in dependencies:
                job_info.AssetDependency += dependency

        # Add list of expected files to job
        # ---------------------------------
        exp = instance.data.get("expectedFiles")
        for filepath in iter_expected_files(exp):
            job_info.OutputDirectory += os.path.dirname(filepath)
            job_info.OutputFilename += os.path.basename(filepath)

        return job_info

    def get_plugin_info(self):
        # Not all hosts can import this module.
        import bpy

        plugin_info = BlenderPluginInfo(
            SceneFile=self.scene_path,
            Version=bpy.app.version_string,
            SaveFile=True,
        )

        plugin_payload = attr.asdict(plugin_info)

        # Patching with pluginInfo from settings
        for key, value in self.pluginInfo.items():
            plugin_payload[key] = value

        return plugin_payload

    def process_submission(self, auth=None):
        instance = self._instance

        expected_files = instance.data["expectedFiles"]
        if not expected_files:
            raise RuntimeError("No Render Elements found!")

        first_file = next(iter_expected_files(expected_files))
        output_dir = os.path.dirname(first_file)
        instance.data["outputDir"] = output_dir
        instance.data["toBeRenderedOn"] = "deadline"

        payload = self.assemble_payload()
        auth = self._instance.data["deadline"]["auth"]
        verify = self._instance.data["deadline"]["verify"]
        return self.submit(payload, auth=auth, verify=verify)

    def from_published_scene(self):
        """
        This is needed to set the correct path for the json metadata. Because
        the rendering path is set in the blend file during the collection,
        and the path is adjusted to use the published scene, this ensures that
        the metadata and the rendered files are in the same location.
        """
        return super().from_published_scene(False)

    @classmethod
    def get_attribute_defs(cls):
        defs = super(BlenderSubmitDeadline, cls).get_attribute_defs()
        defs.extend([
            BoolDef("use_published",
                    default=cls.use_published,
                    label="Use Published Scene"),

            NumberDef("priority",
                      minimum=1,
                      maximum=250,
                      decimals=0,
                      default=cls.priority,
                      label="Priority"),

            NumberDef("chunkSize",
                      minimum=1,
                      maximum=50,
                      decimals=0,
                      default=cls.chunk_size,
                      label="Frames Per Task"),

            TextDef("group",
                    default=cls.group,
                    label="Group Name"),

            TextDef("job_delay",
                    default=cls.job_delay,
                    label="Job Delay",
                    placeholder="dd:hh:mm:ss",
                    tooltip="Delay the job by the specified amount of time. "
                            "Timecode: dd:hh:mm:ss."),
        ])

        return defs
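For reference, the Frames value built above follows Deadline's
"start-endxstep" syntax; the numbers are hypothetical:

    frames = "{start}-{end}x{step}".format(start=1001, end=1100, step=2)
    # "1001-1100x2" -> render every second frame between 1001 and 1100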
@@ -1,271 +0,0 @@
import os
import re
import json
import getpass
import pyblish.api

from ayon_deadline.abstract_submit_deadline import requests_post


class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
    """Submit CelAction2D scene to Deadline.

    Renders are submitted to a Deadline Web Service.

    """

    label = "Submit CelAction to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["celaction"]
    families = ["render.farm"]
    settings_category = "deadline"

    deadline_department = ""
    deadline_priority = 50
    deadline_pool = ""
    deadline_pool_secondary = ""
    deadline_group = ""
    deadline_chunk_size = 1
    deadline_job_delay = "00:00:08:00"

    def process(self, instance):

        context = instance.context

        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        self.deadline_url = "{}/api/jobs".format(deadline_url)
        self._comment = instance.data["comment"]
        self._deadline_user = context.data.get(
            "deadlineUser", getpass.getuser())
        self._frame_start = int(instance.data["frameStart"])
        self._frame_end = int(instance.data["frameEnd"])

        # get output path
        render_path = instance.data['path']
        script_path = context.data["currentFile"]

        response = self.payload_submit(instance,
                                       script_path,
                                       render_path
                                       )
        # Store output dir for unified publisher (filesequence)
        instance.data["deadlineSubmissionJob"] = response.json()

        instance.data["outputDir"] = os.path.dirname(
            render_path).replace("\\", "/")

        instance.data["publishJobState"] = "Suspended"

        # adding 2d render specific family for version identification
        # in Loader
        instance.data["families"] = ["render2d"]

    def payload_submit(self,
                       instance,
                       script_path,
                       render_path
                       ):
        resolution_width = instance.data["resolutionWidth"]
        resolution_height = instance.data["resolutionHeight"]
        render_dir = os.path.normpath(os.path.dirname(render_path))
        render_path = os.path.normpath(render_path)
        script_name = os.path.basename(script_path)

        anatomy = instance.context.data["anatomy"]
        publish_template = anatomy.get_template_item(
            "publish", "default", "path"
        )
        for item in instance.context:
            if "workfile" in item.data["productType"]:
                msg = "Workfile (scene) must be published along"
                assert item.data["publish"] is True, msg

                template_data = item.data.get("anatomyData")
                rep = item.data.get("representations")[0].get("name")
                template_data["representation"] = rep
                template_data["ext"] = rep
                template_data["comment"] = None
                template_filled = publish_template.format_strict(
                    template_data
                )
                script_path = os.path.normpath(template_filled)

                self.log.info(
                    "Using published scene for render {}".format(script_path)
                )

        jobname = "%s - %s" % (script_name, instance.name)

        output_filename_0 = self.preview_fname(render_path)

        try:
            # Ensure render folder exists
            os.makedirs(render_dir)
        except OSError:
            pass

        # define chunk and priority
        chunk_size = instance.context.data.get("chunk")
        if not chunk_size:
            chunk_size = self.deadline_chunk_size

        # search for the %0Nd pattern in the name and its padding number
        search_results = re.search(r"(%0)(\d)(d)[._]", render_path).groups()
        split_patern = "".join(search_results)
        padding_number = int(search_results[1])

        args = [
            f"<QUOTE>{script_path}<QUOTE>",
            "-a",
            "-16",
            "-s <STARTFRAME>",
            "-e <ENDFRAME>",
            f"-d <QUOTE>{render_dir}<QUOTE>",
            f"-x {resolution_width}",
            f"-y {resolution_height}",
            f"-r <QUOTE>{render_path.replace(split_patern, '')}<QUOTE>",
            f"-= AbsoluteFrameNumber=on -= PadDigits={padding_number}",
            "-= ClearAttachment=on",
        ]

        payload = {
            "JobInfo": {
                # Job name, as seen in Monitor
                "Name": jobname,

                # plugin definition
                "Plugin": "CelAction",

                # Top-level group name
                "BatchName": script_name,

                # Arbitrary username, for visualisation in Monitor
                "UserName": self._deadline_user,

                "Department": self.deadline_department,
                "Priority": self.deadline_priority,

                "Group": self.deadline_group,
                "Pool": self.deadline_pool,
                "SecondaryPool": self.deadline_pool_secondary,
                "ChunkSize": chunk_size,

                "Frames": f"{self._frame_start}-{self._frame_end}",
                "Comment": self._comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0": output_filename_0.replace("\\", "/"),

                # # Asset dependency to wait for at least
                # # the scene file to sync.
                # "AssetDependency0": script_path
                "ScheduledType": "Once",
                "JobDelay": self.deadline_job_delay
            },
            "PluginInfo": {
                # Input
                "SceneFile": script_path,

                # Output directory
                "OutputFilePath": render_dir.replace("\\", "/"),

                # Plugin attributes
                "StartupDirectory": "",
                "Arguments": " ".join(args),

                # Resolve relative references
                "ProjectPath": script_path,
                "AWSAssetFile0": render_path,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        plugin = payload["JobInfo"]["Plugin"]
        self.log.debug("using render plugin : {}".format(plugin))

        self.log.debug("Submitting..")
        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

        # adding expected files to instance.data
        self.expected_files(instance, render_path)
        self.log.debug("__ expectedFiles: `{}`".format(
            instance.data["expectedFiles"]))
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(self.deadline_url, json=payload,
                                 auth=auth,
                                 verify=verify)

        if not response.ok:
            self.log.error(
                "Submission failed! [{}] {}".format(
                    response.status_code, response.content))
            self.log.debug(payload)
            raise SystemExit(response.text)

        return response

    def preflight_check(self, instance):
        """Ensure the startFrame, endFrame and byFrameStep are integers."""

        for key in ("frameStart", "frameEnd"):
            value = instance.data[key]

            if int(value) == value:
                continue

            self.log.warning(
                "%f=%d was rounded off to nearest integer"
                % (value, int(value))
            )

    def preview_fname(self, path):
        """Return output file path with #### for padding.

        Deadline requires the path to be formatted with # in place of numbers.
        For example `/path/to/render.####.png`

        Args:
            path (str): path to rendered images

        Returns:
            str

        """
        self.log.debug("_ path: `{}`".format(path))
        if "%" in path:
            search_results = re.search(r"[._](%0)(\d)(d)[._]", path).groups()
            split_patern = "".join(search_results)
            split_path = path.split(split_patern)
            hashes = "#" * int(search_results[1])
            return "".join([split_path[0], hashes, split_path[-1]])

        self.log.debug("_ path: `{}`".format(path))
        return path

    def expected_files(self, instance, filepath):
        """Create expected files in instance data."""
        if not instance.data.get("expectedFiles"):
            instance.data["expectedFiles"] = []

        dirpath = os.path.dirname(filepath)
        filename = os.path.basename(filepath)

        if "#" in filename:
            pparts = filename.split("#")
            padding = "%0{}d".format(len(pparts) - 1)
            filename = pparts[0] + padding + pparts[-1]

        if "%" not in filename:
            instance.data["expectedFiles"].append(filepath)
            return

        for i in range(self._frame_start, (self._frame_end + 1)):
            instance.data["expectedFiles"].append(
                os.path.join(dirpath, (filename % i)).replace("\\", "/")
            )
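A hedged sketch of the %0Nd -> #### conversion that preview_fname()
performs; the path is made up:

    import re

    path = "/renders/shot.%04d.png"
    groups = re.search(r"[._](%0)(\d)(d)[._]", path).groups()  # ('%0', '4', 'd')
    token = "".join(groups)                                    # '%04d'
    head, tail = path.split(token)
    print(head + "#" * int(groups[1]) + tail)   # /renders/shot.####.png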
@@ -1,253 +0,0 @@
import os
import json
import getpass

import pyblish.api

from ayon_deadline.abstract_submit_deadline import requests_post
from ayon_core.pipeline.publish import (
    AYONPyblishPluginMixin
)
from ayon_core.lib import NumberDef


class FusionSubmitDeadline(
    pyblish.api.InstancePlugin,
    AYONPyblishPluginMixin
):
    """Submit current Comp to Deadline.

    Renders are submitted to a Deadline Web Service as
    supplied via the settings key "DEADLINE_REST_URL".

    """

    label = "Submit Fusion to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["fusion"]
    families = ["render"]
    targets = ["local"]
    settings_category = "deadline"

    # presets
    plugin = None

    priority = 50
    chunk_size = 1
    concurrent_tasks = 1
    group = ""

    @classmethod
    def get_attribute_defs(cls):
        return [
            NumberDef(
                "priority",
                label="Priority",
                default=cls.priority,
                decimals=0
            ),
            NumberDef(
                "chunk",
                label="Frames Per Task",
                default=cls.chunk_size,
                decimals=0,
                minimum=1,
                maximum=1000
            ),
            NumberDef(
                "concurrency",
                label="Concurrency",
                default=cls.concurrent_tasks,
                decimals=0,
                minimum=1,
                maximum=10
            )
        ]

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        attribute_values = self.get_attr_values_from_data(
            instance.data)

        context = instance.context

        key = "__hasRun{}".format(self.__class__.__name__)
        if context.data.get(key, False):
            return
        else:
            context.data[key] = True

        from ayon_fusion.api.lib import get_frame_path

        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        # Collect all saver instances in context that are to be rendered
        saver_instances = []
        for inst in context:
            if inst.data["productType"] != "render":
                # Allow only saver family instances
                continue

            if not inst.data.get("publish", True):
                # Skip inactive instances
                continue

            self.log.debug(inst.data["name"])
            saver_instances.append(inst)

        if not saver_instances:
            raise RuntimeError("No instances found for Deadline submission")

        comment = instance.data.get("comment", "")
        deadline_user = context.data.get("deadlineUser", getpass.getuser())

        script_path = context.data["currentFile"]

        anatomy = instance.context.data["anatomy"]
        publish_template = anatomy.get_template_item(
            "publish", "default", "path"
        )
        for item in context:
            if "workfile" in item.data["families"]:
                msg = "Workfile (scene) must be published along"
                assert item.data["publish"] is True, msg

                template_data = item.data.get("anatomyData")
                rep = item.data.get("representations")[0].get("name")
                template_data["representation"] = rep
                template_data["ext"] = rep
                template_data["comment"] = None
                template_filled = publish_template.format_strict(
                    template_data
                )
                script_path = os.path.normpath(template_filled)

                self.log.info(
                    "Using published scene for render {}".format(script_path)
                )

        filename = os.path.basename(script_path)

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": filename,

                # Asset dependency to wait for at least the scene file
                # to sync.
                "AssetDependency0": script_path,

                # Job name, as seen in Monitor
                "Name": filename,

                "Priority": attribute_values.get(
                    "priority", self.priority),
                "ChunkSize": attribute_values.get(
                    "chunk", self.chunk_size),
                "ConcurrentTasks": attribute_values.get(
                    "concurrency",
                    self.concurrent_tasks
                ),

                # User, as seen in Monitor
                "UserName": deadline_user,

                "Pool": instance.data.get("primaryPool"),
                "SecondaryPool": instance.data.get("secondaryPool"),
                "Group": self.group,

                "Plugin": self.plugin,
                "Frames": "{start}-{end}".format(
                    start=int(instance.data["frameStartHandle"]),
                    end=int(instance.data["frameEndHandle"])
                ),

                "Comment": comment,
            },
            "PluginInfo": {
                # Input
                "FlowFile": script_path,

                # Mandatory for Deadline
                "Version": str(instance.data["app_version"]),

                # Render in high quality
                "HighQuality": True,

                # Whether saver output should be checked after rendering
                # is complete
                "CheckOutput": True,

                # Proxy: higher numbers mean smaller images for faster
                # test renders; 1 = no proxy quality
                "Proxy": 1
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Enable going to rendered frames from Deadline Monitor
        for index, instance in enumerate(saver_instances):
            head, padding, tail = get_frame_path(
                instance.data["expectedFiles"][0]
            )
            path = "{}{}{}".format(head, "#" * padding, tail)
            folder, filename = os.path.split(path)
            payload["JobInfo"]["OutputDirectory%d" % index] = folder
            payload["JobInfo"]["OutputFilename%d" % index] = filename

        # Include critical variables with submission
        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
            "AYON_IN_TESTS",
            "AYON_BUNDLE_NAME",
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        # to recognize render jobs
        environment["AYON_RENDER_JOB"] = "1"

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })

        self.log.debug("Submitting..")
        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(deadline_url)
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(url, json=payload, auth=auth, verify=verify)
        if not response.ok:
            raise Exception(response.text)

        # Store the response for dependent job submission plug-ins
        for instance in saver_instances:
            instance.data["deadlineSubmissionJob"] = response.json()
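The indexed EnvironmentKeyValue expansion used above, distilled into a
standalone sketch; the values are hypothetical:

    environment = {"AYON_PROJECT_NAME": "demo", "AYON_RENDER_JOB": "1"}
    job_info = {
        "EnvironmentKeyValue%d" % index: "{}={}".format(key, environment[key])
        for index, key in enumerate(environment)
    }
    # {'EnvironmentKeyValue0': 'AYON_PROJECT_NAME=demo',
    #  'EnvironmentKeyValue1': 'AYON_RENDER_JOB=1'}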
@@ -1,420 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
"""Submitting render job to Deadline."""
|
||||
import os
|
||||
from pathlib import Path
|
||||
from collections import OrderedDict
|
||||
from zipfile import ZipFile, is_zipfile
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
import attr
|
||||
import pyblish.api
|
||||
|
||||
from ayon_deadline import abstract_submit_deadline
|
||||
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo
|
||||
from ayon_core.lib import is_in_tests
|
||||
|
||||
|
||||
class _ZipFile(ZipFile):
|
||||
"""Extended check for windows invalid characters."""
|
||||
|
||||
# this is extending default zipfile table for few invalid characters
|
||||
# that can come from Mac
|
||||
_windows_illegal_characters = ":<>|\"?*\r\n\x00"
|
||||
_windows_illegal_name_trans_table = str.maketrans(
|
||||
_windows_illegal_characters,
|
||||
"_" * len(_windows_illegal_characters)
|
||||
)
|
||||
|
||||
|
||||
@attr.s
|
||||
class PluginInfo(object):
|
||||
"""Plugin info structure for Harmony Deadline plugin."""
|
||||
|
||||
SceneFile = attr.ib()
|
||||
# Harmony version
|
||||
Version = attr.ib()
|
||||
|
||||
Camera = attr.ib(default="")
|
||||
FieldOfView = attr.ib(default=41.11)
|
||||
IsDatabase = attr.ib(default=False)
|
||||
ResolutionX = attr.ib(default=1920)
|
||||
ResolutionY = attr.ib(default=1080)
|
||||
|
||||
# Resolution name preset, default
|
||||
UsingResPreset = attr.ib(default=False)
|
||||
ResolutionName = attr.ib(default="HDTV_1080p24")
|
||||
|
||||
PreRenderInlineScript = attr.ib(default=None)
|
||||
|
||||
# --------------------------------------------------
|
||||
_outputNode = attr.ib(factory=list)
|
||||
|
||||
@property
|
||||
def OutputNode(self): # noqa: N802
|
||||
"""Return all output nodes formatted for Deadline.
|
||||
|
||||
Returns:
|
||||
dict: as `{'Output0Node', 'Top/renderFarmDefault'}`
|
||||
|
||||
"""
|
||||
out = {}
|
||||
for index, v in enumerate(self._outputNode):
|
||||
out["Output{}Node".format(index)] = v
|
||||
return out
|
||||
|
||||
@OutputNode.setter
|
||||
def OutputNode(self, val): # noqa: N802
|
||||
self._outputNode.append(val)
|
||||
|
||||
# --------------------------------------------------
|
||||
_outputType = attr.ib(factory=list)
|
||||
|
||||
@property
|
||||
def OutputType(self): # noqa: N802
|
||||
"""Return output nodes type formatted for Deadline.
|
||||
|
||||
Returns:
|
||||
dict: as `{'Output0Type', 'Image'}`
|
||||
|
||||
"""
|
||||
out = {}
|
||||
for index, v in enumerate(self._outputType):
|
||||
out["Output{}Type".format(index)] = v
|
||||
return out
|
||||
|
||||
@OutputType.setter
|
||||
def OutputType(self, val): # noqa: N802
|
||||
self._outputType.append(val)
|
||||
|
||||
# --------------------------------------------------
|
||||
_outputLeadingZero = attr.ib(factory=list)
|
||||
|
||||
@property
|
||||
def OutputLeadingZero(self): # noqa: N802
|
||||
"""Return output nodes type formatted for Deadline.
|
||||
|
||||
Returns:
|
||||
dict: as `{'Output0LeadingZero', '3'}`
|
||||
|
||||
"""
|
||||
out = {}
|
||||
for index, v in enumerate(self._outputLeadingZero):
|
||||
out["Output{}LeadingZero".format(index)] = v
|
||||
return out
|
||||
|
||||
@OutputLeadingZero.setter
|
||||
def OutputLeadingZero(self, val): # noqa: N802
|
||||
self._outputLeadingZero.append(val)
|
||||
|
||||
# --------------------------------------------------
|
||||
_outputFormat = attr.ib(factory=list)
|
||||
|
||||
@property
|
||||
def OutputFormat(self): # noqa: N802
|
||||
"""Return output nodes format formatted for Deadline.
|
||||
|
||||
Returns:
|
||||
dict: as `{'Output0Type', 'PNG4'}`
|
||||
|
||||
"""
|
||||
out = {}
|
||||
for index, v in enumerate(self._outputFormat):
|
||||
out["Output{}Format".format(index)] = v
|
||||
return out
|
||||
|
||||
@OutputFormat.setter
|
||||
def OutputFormat(self, val): # noqa: N802
|
||||
self._outputFormat.append(val)
|
||||
|
||||
# --------------------------------------------------
|
||||
_outputStartFrame = attr.ib(factory=list)
|
||||
|
||||
@property
|
||||
def OutputStartFrame(self): # noqa: N802
|
||||
"""Return start frame for output nodes formatted for Deadline.
|
||||
|
||||
Returns:
|
||||
dict: as `{'Output0StartFrame', '1'}`
|
||||
|
||||
"""
|
||||
out = {}
|
||||
for index, v in enumerate(self._outputStartFrame):
|
||||
out["Output{}StartFrame".format(index)] = v
|
||||
return out
|
||||
|
||||
@OutputStartFrame.setter
|
||||
def OutputStartFrame(self, val): # noqa: N802
|
||||
self._outputStartFrame.append(val)
|
||||
|
||||
# --------------------------------------------------
|
||||
_outputPath = attr.ib(factory=list)
|
||||
|
||||
@property
|
||||
def OutputPath(self): # noqa: N802
|
||||
"""Return output paths for nodes formatted for Deadline.
|
||||
|
||||
Returns:
|
||||
dict: as `{'Output0Path', '/output/path'}`
|
||||
|
||||
"""
|
||||
out = {}
|
||||
for index, v in enumerate(self._outputPath):
|
||||
out["Output{}Path".format(index)] = v
|
||||
return out
|
||||
|
||||
@OutputPath.setter
|
||||
def OutputPath(self, val): # noqa: N802
|
||||
self._outputPath.append(val)
|
||||
|
||||
def set_output(self, node, image_format, output,
|
||||
output_type="Image", zeros=3, start_frame=1):
|
||||
"""Helper to set output.
|
||||
|
||||
This should be used instead of setting properties individually
|
||||
as so index remain consistent.
|
||||
|
||||
Args:
|
||||
node (str): harmony write node name
|
||||
image_format (str): format of output (PNG4, TIF, ...)
|
||||
output (str): output path
|
||||
output_type (str, optional): "Image" or "Movie" (not supported).
|
||||
zeros (int, optional): Leading zeros (for 0001 = 3)
|
||||
start_frame (int, optional): Sequence offset.
|
||||
|
||||
"""
|
||||
|
||||
self.OutputNode = node
|
||||
self.OutputFormat = image_format
|
||||
self.OutputPath = output
|
||||
self.OutputType = output_type
|
||||
self.OutputLeadingZero = zeros
|
||||
self.OutputStartFrame = start_frame
|
||||
|
||||
def serialize(self):
|
||||
"""Return all data serialized as dictionary.
|
||||
|
||||
Returns:
|
||||
OrderedDict: all serialized data.
|
||||
|
||||
"""
|
||||
def filter_data(a, v):
|
||||
if a.name.startswith("_"):
|
||||
return False
|
||||
if v is None:
|
||||
return False
|
||||
return True
|
||||
|
||||
serialized = attr.asdict(
|
||||
self, dict_factory=OrderedDict, filter=filter_data)
|
||||
serialized.update(self.OutputNode)
|
||||
serialized.update(self.OutputFormat)
|
||||
serialized.update(self.OutputPath)
|
||||
serialized.update(self.OutputType)
|
||||
serialized.update(self.OutputLeadingZero)
|
||||
serialized.update(self.OutputStartFrame)
|
||||
|
||||
return serialized
|
||||
|
||||
|
||||
class HarmonySubmitDeadline(
|
||||
abstract_submit_deadline.AbstractSubmitDeadline
|
||||
):
|
||||
"""Submit render write of Harmony scene to Deadline.
|
||||
|
||||
Renders are submitted to a Deadline Web Service as
|
||||
supplied via the environment variable ``DEADLINE_REST_URL``.
|
||||
|
||||
Note:
|
||||
If Deadline configuration is not detected, this plugin will
|
||||
be disabled.
|
||||
|
||||
Attributes:
|
||||
use_published (bool): Use published scene to render instead of the
|
||||
one in work area.
|
||||
|
||||
"""
|
||||
|
||||
label = "Submit to Deadline"
|
||||
order = pyblish.api.IntegratorOrder + 0.1
|
||||
hosts = ["harmony"]
|
||||
families = ["render.farm"]
|
||||
targets = ["local"]
|
||||
settings_category = "deadline"
|
||||
|
||||
optional = True
|
||||
use_published = False
|
||||
priority = 50
|
||||
chunk_size = 1000000
|
||||
group = "none"
|
||||
department = ""
|
||||
|
||||
def get_job_info(self):
|
||||
job_info = DeadlineJobInfo("Harmony")
|
||||
job_info.Name = self._instance.data["name"]
|
||||
job_info.Plugin = "HarmonyAYON"
|
||||
job_info.Frames = "{}-{}".format(
|
||||
self._instance.data["frameStartHandle"],
|
||||
self._instance.data["frameEndHandle"]
|
||||
)
|
||||
# for now, get those from presets. Later on it should be
|
||||
# configurable in Harmony UI directly.
|
||||
job_info.Priority = self.priority
|
||||
job_info.Pool = self._instance.data.get("primaryPool")
|
||||
job_info.SecondaryPool = self._instance.data.get("secondaryPool")
|
||||
job_info.ChunkSize = self.chunk_size
|
||||
batch_name = os.path.basename(self._instance.data["source"])
|
||||
if is_in_tests():
|
||||
batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
|
||||
job_info.BatchName = batch_name
|
||||
job_info.Department = self.department
|
||||
job_info.Group = self.group
|
||||
|
||||
keys = [
|
||||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
"AYON_WORKDIR",
|
||||
"AYON_APP_NAME",
|
||||
"AYON_LOG_NO_COLORS"
|
||||
"AYON_IN_TESTS"
|
||||
]
|
||||
|
||||
environment = {
|
||||
key: os.environ[key]
|
||||
for key in keys
|
||||
if key in os.environ
|
||||
}
|
||||
for key in keys:
|
||||
value = environment.get(key)
|
||||
if value:
|
||||
job_info.EnvironmentKeyValue[key] = value
|
||||
|
||||
# to recognize render jobs
|
||||
job_info.add_render_job_env_var()
|
||||
|
||||
return job_info
|
||||
|
||||
def _unzip_scene_file(self, published_scene: Path) -> Path:
|
||||
"""Unzip scene zip file to its directory.
|
||||
|
||||
Unzip scene file (if it is zip file) to its current directory and
|
||||
return path to xstage file there. Xstage file is determined by its
|
||||
name.
|
||||
|
||||
Args:
|
||||
published_scene (Path): path to zip file.
|
||||
|
||||
Returns:
|
||||
Path: The path to unzipped xstage.
|
||||
"""
|
||||
# if not zip, bail out.
|
||||
if "zip" not in published_scene.suffix or not is_zipfile(
|
||||
published_scene.as_posix()
|
||||
):
|
||||
self.log.error("Published scene is not in zip.")
|
||||
self.log.error(published_scene)
|
||||
raise AssertionError("invalid scene format")
|
||||
|
||||
xstage_path = (
|
||||
published_scene.parent
|
||||
/ published_scene.stem
|
||||
/ f"{published_scene.stem}.xstage"
|
||||
)
|
||||
unzip_dir = (published_scene.parent / published_scene.stem)
|
||||
with _ZipFile(published_scene, "r") as zip_ref:
|
||||
# UNC path (//?/) added to minimalize risk with extracting
|
||||
# to large file paths
|
||||
zip_ref.extractall("//?/" + str(unzip_dir.as_posix()))
|
||||
|
||||
# find any xstage files in directory, prefer the one with the same name
|
||||
# as directory (plus extension)
|
||||
xstage_files = []
|
||||
for scene in unzip_dir.iterdir():
|
||||
if scene.suffix == ".xstage":
|
||||
xstage_files.append(scene)
|
||||
|
||||
# there must be at least one (but maybe not more?) xstage file
|
||||
if not xstage_files:
|
||||
self.log.error("No xstage files found in zip")
|
||||
raise AssertionError("Invalid scene archive")
|
||||
|
||||
ideal_scene = False
|
||||
# find the one with the same name as zip. In case there can be more
|
||||
# then one xtage file.
|
||||
for scene in xstage_files:
|
||||
# if /foo/bar/baz.zip == /foo/bar/baz/baz.xstage
|
||||
# ^^^ ^^^
|
||||
if scene.stem == published_scene.stem:
|
||||
xstage_path = scene
|
||||
ideal_scene = True
|
||||
|
||||
# but sometimes xstage file has different name then zip - in that case
|
||||
# use that one.
|
||||
if not ideal_scene:
|
||||
xstage_path = xstage_files[0]
|
||||
return xstage_path

    def get_plugin_info(self):
        # this is the path to the published scene workfile _ZIP_. Before
        # rendering, we need to unzip it.
        published_scene = Path(
            self.from_published_scene(False))
        self.log.debug(f"Processing {published_scene.as_posix()}")
        xstage_path = self._unzip_scene_file(published_scene)
        render_path = xstage_path.parent / "renders"

        # output dir for the submit_publish job to create its .json file in
        self._instance.data["outputDir"] = render_path
        new_expected_files = []
        render_path_str = str(render_path.as_posix())
        for file in self._instance.data["expectedFiles"]:
            _file = str(Path(file).as_posix())
            expected_dir_str = os.path.dirname(_file)
            new_expected_files.append(
                _file.replace(expected_dir_str, render_path_str)
            )
        audio_file = self._instance.data.get("audioFile")
        if audio_file:
            abs_path = xstage_path.parent / audio_file
            self._instance.context.data["audioFile"] = str(abs_path)

        self._instance.data["source"] = str(published_scene.as_posix())
        self._instance.data["expectedFiles"] = new_expected_files
        harmony_plugin_info = PluginInfo(
            SceneFile=xstage_path.as_posix(),
            Version=(
                self._instance.context.data["harmonyVersion"].split(".")[0]),
            FieldOfView=self._instance.context.data["FOV"],
            ResolutionX=self._instance.data["resolutionWidth"],
            ResolutionY=self._instance.data["resolutionHeight"]
        )

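        # Strip the zero-padded frame number and three-letter extension from
        # the first expected file to get the render output prefix, e.g. with
        # three leading zeros "render/final_0001.png" -> "render/final_"
        # (illustrative path).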
        pattern = (
            '[0]{' + str(self._instance.data["leadingZeros"]) +
            r'}1\.[a-zA-Z]{3}'
        )
        render_prefix = re.sub(pattern, '',
                               self._instance.data["expectedFiles"][0])
        harmony_plugin_info.set_output(
            self._instance.data["setMembers"][0],
            self._instance.data["outputFormat"],
            render_prefix,
            self._instance.data["outputType"],
            self._instance.data["leadingZeros"],
            self._instance.data["outputStartFrame"]
        )

        all_write_nodes = self._instance.context.data["all_write_nodes"]
        disable_nodes = []
        for node in all_write_nodes:
            # disable all other write nodes
            if node != self._instance.data["setMembers"][0]:
                disable_nodes.append("node.setEnable('{}', false)"
                                     .format(node))
        harmony_plugin_info.PreRenderInlineScript = ';'.join(disable_nodes)

        return harmony_plugin_info.serialize()
@@ -1,181 +0,0 @@
import os
import getpass
from datetime import datetime

import attr
import pyblish.api
from ayon_core.lib import (
    TextDef,
    NumberDef,
    is_in_tests,
)
from ayon_core.pipeline import (
    AYONPyblishPluginMixin
)
from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class HoudiniPluginInfo(object):
    Build = attr.ib(default=None)
    IgnoreInputs = attr.ib(default=True)
    ScriptJob = attr.ib(default=True)
    SceneFile = attr.ib(default=None)  # Input
    SaveFile = attr.ib(default=True)
    ScriptFilename = attr.ib(default=None)
    OutputDriver = attr.ib(default=None)
    Version = attr.ib(default=None)  # Mandatory for Deadline
    ProjectPath = attr.ib(default=None)


class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,  # noqa
                                 AYONPyblishPluginMixin):
    """Submit Houdini scene to perform a local publish in Deadline.

    Publishing in Deadline can be helpful for scenes that publish very slow.
    This way it can process in the background on another machine without the
    Artist having to wait for the publish to finish on their local machine.
    """

    label = "Submit Scene to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["houdini"]
    families = ["publish.hou"]
    targets = ["local"]
    settings_category = "deadline"

    priority = 50
    chunk_size = 999999
    group = None
    jobInfo = {}
    pluginInfo = {}

    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="Houdini")

        job_info.update(self.jobInfo)
        instance = self._instance
        context = instance.context
        assert all(
            result["success"] for result in context.data["results"]
        ), "Errors found, aborting integration.."

        project_name = instance.context.data["projectName"]
        filepath = context.data["currentFile"]
        scenename = os.path.basename(filepath)
        job_name = "{scene} - {instance} [PUBLISH]".format(
            scene=scenename, instance=instance.name)
        batch_name = "{code} - {scene}".format(code=project_name,
                                               scene=scenename)
        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")

        job_info.Name = job_name
        job_info.BatchName = batch_name
        job_info.Plugin = instance.data["plugin"]
        job_info.UserName = context.data.get("deadlineUser", getpass.getuser())
        rop_node = self.get_rop_node(instance)
        if rop_node.type().name() != "alembic":
            frames = "{start}-{end}x{step}".format(
                start=int(instance.data["frameStart"]),
                end=int(instance.data["frameEnd"]),
                step=int(instance.data["byFrameStep"]),
            )

            job_info.Frames = frames

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")

        attr_values = self.get_attr_values_from_data(instance.data)

        job_info.ChunkSize = instance.data.get("chunk_size", self.chunk_size)
        job_info.Comment = context.data.get("comment")
        job_info.Priority = attr_values.get("priority", self.priority)
        job_info.Group = attr_values.get("group", self.group)

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if not value:
                continue
            job_info.EnvironmentKeyValue[key] = value
        # to recognize render jobs
        job_info.add_render_job_env_var()

        return job_info

    def get_plugin_info(self):
        # Not all hosts can import this module.
        import hou

        instance = self._instance
        version = hou.applicationVersionString()
        version = ".".join(version.split(".")[:2])
        rop = self.get_rop_node(instance)
        plugin_info = HoudiniPluginInfo(
            Build=None,
            IgnoreInputs=True,
            ScriptJob=True,
            SceneFile=self.scene_path,
            SaveFile=True,
            OutputDriver=rop.path(),
            Version=version,
            ProjectPath=os.path.dirname(self.scene_path)
        )

        plugin_payload = attr.asdict(plugin_info)

        return plugin_payload

    def process(self, instance):
        super(HoudiniCacheSubmitDeadline, self).process(instance)
        output_dir = os.path.dirname(instance.data["files"][0])
        instance.data["outputDir"] = output_dir
        instance.data["toBeRenderedOn"] = "deadline"

    def get_rop_node(self, instance):
        # Not all hosts can import this module.
        import hou

        rop = instance.data.get("instance_node")
        rop_node = hou.node(rop)

        return rop_node

    @classmethod
    def get_attribute_defs(cls):
        defs = super(HoudiniCacheSubmitDeadline, cls).get_attribute_defs()
        defs.extend([
            NumberDef("priority",
                      minimum=1,
                      maximum=250,
                      decimals=0,
                      default=cls.priority,
                      label="Priority"),
            TextDef("group",
                    default=cls.group,
                    label="Group Name"),
        ])

        return defs
@@ -1,403 +0,0 @@
import os
import attr
import getpass
from datetime import datetime

import pyblish.api

from ayon_core.pipeline import AYONPyblishPluginMixin
from ayon_core.lib import (
    is_in_tests,
    TextDef,
    NumberDef
)
from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class DeadlinePluginInfo():
    SceneFile = attr.ib(default=None)
    OutputDriver = attr.ib(default=None)
    Version = attr.ib(default=None)
    IgnoreInputs = attr.ib(default=True)


@attr.s
class ArnoldRenderDeadlinePluginInfo():
    InputFile = attr.ib(default=None)
    Verbose = attr.ib(default=4)


@attr.s
class MantraRenderDeadlinePluginInfo():
    SceneFile = attr.ib(default=None)
    Version = attr.ib(default=None)


@attr.s
class VrayRenderPluginInfo():
    InputFilename = attr.ib(default=None)
    SeparateFilesPerFrame = attr.ib(default=True)


@attr.s
class RedshiftRenderPluginInfo():
    SceneFile = attr.ib(default=None)
    # Use "1" as the default Redshift version just because it is the
    # default fallback version in Deadline's Redshift plugin if no
    # version was specified
    Version = attr.ib(default="1")


@attr.s
class HuskStandalonePluginInfo():
    """Requires Deadline Husk Standalone Plugin.
    See Deadline Plug-in:
    https://github.com/BigRoy/HuskStandaloneSubmitter
    Also see Husk options here:
    https://www.sidefx.com/docs/houdini/ref/utils/husk.html
    """
    SceneFile = attr.ib()
    # TODO: Below parameters are only supported by a custom version of the
    #   plugin
    Renderer = attr.ib(default=None)
    RenderSettings = attr.ib(default="/Render/rendersettings")
    Purpose = attr.ib(default="geometry,render")
    Complexity = attr.ib(default="veryhigh")
    Snapshot = attr.ib(default=-1)
    LogLevel = attr.ib(default="2")
    PreRender = attr.ib(default="")
    PreFrame = attr.ib(default="")
    PostFrame = attr.ib(default="")
    PostRender = attr.ib(default="")
    RestartDelegate = attr.ib(default="")
    Version = attr.ib(default="")


class HoudiniSubmitDeadline(
    abstract_submit_deadline.AbstractSubmitDeadline,
    AYONPyblishPluginMixin
):
    """Submit Render ROPs to Deadline.

    Renders are submitted to a Deadline Web Service as
    supplied via the environment variable AVALON_DEADLINE.

    Target "local":
        Even though this does *not* render locally this is seen as
        a 'local' submission as it is the regular way of submitting
        a Houdini render locally.

    """

    label = "Submit Render to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["houdini"]
    families = ["redshift_rop",
                "arnold_rop",
                "mantra_rop",
                "karma_rop",
                "vray_rop"]
    targets = ["local"]
    settings_category = "deadline"
    use_published = True

    # presets
    export_priority = 50
    export_chunk_size = 10
    export_group = ""
    priority = 50
    chunk_size = 1
    group = ""

    @classmethod
    def get_attribute_defs(cls):
        return [
            NumberDef(
                "priority",
                label="Priority",
                default=cls.priority,
                decimals=0
            ),
            NumberDef(
                "chunk",
                label="Frames Per Task",
                default=cls.chunk_size,
                decimals=0,
                minimum=1,
                maximum=1000
            ),
            TextDef(
                "group",
                default=cls.group,
                label="Group Name"
            ),
            NumberDef(
                "export_priority",
                label="Export Priority",
                default=cls.export_priority,
                decimals=0
            ),
            NumberDef(
                "export_chunk",
                label="Export Frames Per Task",
                default=cls.export_chunk_size,
                decimals=0,
                minimum=1,
                maximum=1000
            ),
            TextDef(
                "export_group",
                default=cls.export_group,
                label="Export Group Name"
            ),
        ]

    def get_job_info(self, dependency_job_ids=None):

        instance = self._instance
        context = instance.context

        attribute_values = self.get_attr_values_from_data(instance.data)

        # Whether Deadline render submission is being split in two
        # (extract + render)
        split_render_job = instance.data.get("splitRender")

        # If there are some dependency job ids we can assume this is a
        # render job and not an export job
        is_export_job = True
        if dependency_job_ids:
            is_export_job = False

        job_type = "[RENDER]"
        if split_render_job and not is_export_job:
            product_type = instance.data["productType"]
            plugin = {
                "usdrender": "HuskStandalone",
            }.get(product_type)
            if not plugin:
                # Convert from product type to Deadline plugin name
                # i.e., arnold_rop -> Arnold
                plugin = product_type.replace("_rop", "").capitalize()
        else:
            plugin = "Houdini"
            if split_render_job:
                job_type = "[EXPORT IFD]"

        job_info = DeadlineJobInfo(Plugin=plugin)

        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        job_info.Name = "{} - {} {}".format(filename, instance.name, job_type)
        job_info.BatchName = filename

        job_info.UserName = context.data.get(
            "deadlineUser", getpass.getuser())

        if is_in_tests():
            job_info.BatchName += datetime.now().strftime("%d%m%Y%H%M%S")

        # Deadline requires integers in frame range
        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]
        frames = "{start}-{end}x{step}".format(
            start=int(start),
            end=int(end),
            step=int(instance.data["byFrameStep"]),
        )
        job_info.Frames = frames

        # Make sure we make the job frame dependent so render tasks pick up
        # as soon as export tasks are done
        if split_render_job and not is_export_job:
            job_info.IsFrameDependent = bool(instance.data.get(
                "splitRenderFrameDependent", True))

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")

        if split_render_job and is_export_job:
            job_info.Priority = attribute_values.get(
                "export_priority", self.export_priority
            )
            job_info.ChunkSize = attribute_values.get(
                "export_chunk", self.export_chunk_size
            )
            job_info.Group = self.export_group
        else:
            job_info.Priority = attribute_values.get(
                "priority", self.priority
            )
            job_info.ChunkSize = attribute_values.get(
                "chunk", self.chunk_size
            )
            job_info.Group = self.group

        # Apply render globals, like e.g. data from collect machine list
        render_globals = instance.data.get("renderGlobals", {})
        if render_globals:
            self.log.debug("Applying 'renderGlobals' to job info: %s",
                           render_globals)
            job_info.update(render_globals)

        job_info.Comment = context.data.get("comment")

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if value:
                job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs
        job_info.add_render_job_env_var()

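        # Note: OutputDirectory and OutputFilename are indexed job info
        # attributes on DeadlineJobInfo, so "+=" appends a new indexed
        # entry (OutputDirectory0, OutputFilename0, ...) per expected file.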
        for i, filepath in enumerate(instance.data["files"]):
            dirname = os.path.dirname(filepath)
            fname = os.path.basename(filepath)
            job_info.OutputDirectory += dirname.replace("\\", "/")
            job_info.OutputFilename += fname

        # Add dependencies if given
        if dependency_job_ids:
            job_info.JobDependencies = ",".join(dependency_job_ids)

        return job_info

    def get_plugin_info(self, job_type=None):
        # Not all hosts can import this module.
        import hou

        instance = self._instance
        context = instance.context

        hou_major_minor = hou.applicationVersionString().rsplit(".", 1)[0]

        # Output driver to render
        if job_type == "render":
            product_type = instance.data.get("productType")
            if product_type == "arnold_rop":
                plugin_info = ArnoldRenderDeadlinePluginInfo(
                    InputFile=instance.data["ifdFile"]
                )
            elif product_type == "mantra_rop":
                plugin_info = MantraRenderDeadlinePluginInfo(
                    SceneFile=instance.data["ifdFile"],
                    Version=hou_major_minor,
                )
            elif product_type == "vray_rop":
                plugin_info = VrayRenderPluginInfo(
                    InputFilename=instance.data["ifdFile"],
                )
            elif product_type == "redshift_rop":
                plugin_info = RedshiftRenderPluginInfo(
                    SceneFile=instance.data["ifdFile"]
                )
                # Note: To use different versions of Redshift on Deadline
                # set the `REDSHIFT_VERSION` env variable in the Tools
                # settings in the AYON Application plugin. You will also
                # need to set that version in `Redshift.param` file
                # of the Redshift Deadline plugin:
                # [Redshift_Executable_*]
                # where * is the version number.
                if os.getenv("REDSHIFT_VERSION"):
                    plugin_info.Version = os.getenv("REDSHIFT_VERSION")
                else:
                    self.log.warning((
                        "REDSHIFT_VERSION env variable is not set"
                        " - using version configured in Deadline"
                    ))

            elif product_type == "usdrender":
                plugin_info = self._get_husk_standalone_plugin_info(
                    instance, hou_major_minor)

            else:
                self.log.error(
                    "Product type '%s' not supported yet to split render job",
                    product_type
                )
                return
        else:
            driver = hou.node(instance.data["instance_node"])
            plugin_info = DeadlinePluginInfo(
                SceneFile=context.data["currentFile"],
                OutputDriver=driver.path(),
                Version=hou_major_minor,
                IgnoreInputs=True
            )

        return attr.asdict(plugin_info)

    def process(self, instance):
        if not instance.data["farm"]:
            self.log.debug("Render on farm is disabled. "
                           "Skipping deadline submission.")
            return

        super(HoudiniSubmitDeadline, self).process(instance)

        # TODO: Avoid the need for this logic here, needed for submit publish
        # Store output dir for unified publisher (filesequence)
        output_dir = os.path.dirname(instance.data["files"][0])
        instance.data["outputDir"] = output_dir

    def _get_husk_standalone_plugin_info(self, instance, hou_major_minor):
        # Not all hosts can import this module.
        import hou

        # Supply additional parameters from the USD Render ROP
        # to the Husk Standalone Render Plug-in
        rop_node = hou.node(instance.data["instance_node"])
        snapshot_interval = -1
        if rop_node.evalParm("dosnapshot"):
            snapshot_interval = rop_node.evalParm("snapshotinterval")

        restart_delegate = 0
        if rop_node.evalParm("husk_restartdelegate"):
            restart_delegate = rop_node.evalParm("husk_restartdelegateframes")

        rendersettings = (
            rop_node.evalParm("rendersettings")
            or "/Render/rendersettings"
        )
        return HuskStandalonePluginInfo(
            SceneFile=instance.data["ifdFile"],
            Renderer=rop_node.evalParm("renderer"),
            RenderSettings=rendersettings,
            Purpose=rop_node.evalParm("husk_purpose"),
            Complexity=rop_node.evalParm("husk_complexity"),
            Snapshot=snapshot_interval,
            PreRender=rop_node.evalParm("husk_prerender"),
            PreFrame=rop_node.evalParm("husk_preframe"),
            PostFrame=rop_node.evalParm("husk_postframe"),
            PostRender=rop_node.evalParm("husk_postrender"),
            RestartDelegate=restart_delegate,
            Version=hou_major_minor
        )


class HoudiniSubmitDeadlineUsdRender(HoudiniSubmitDeadline):
    # Do not use published workfile paths for USD Render ROP because the
    # Export Job doesn't seem to occur using the published path either, so
    # output paths then do not match the actual rendered paths
    use_published = False
    families = ["usdrender"]
@@ -1,431 +0,0 @@
import os
import getpass
import copy
import attr

from ayon_core.lib import (
    TextDef,
    BoolDef,
    NumberDef,
)
from ayon_core.pipeline import (
    AYONPyblishPluginMixin
)
from ayon_core.pipeline.publish.lib import (
    replace_with_published_scene_path
)
from ayon_core.pipeline.publish import KnownPublishError
from ayon_max.api.lib import (
    get_current_renderer,
    get_multipass_setting
)
from ayon_max.api.lib_rendersettings import RenderSettings
from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class MaxPluginInfo(object):
    SceneFile = attr.ib(default=None)  # Input
    Version = attr.ib(default=None)  # Mandatory for Deadline
    SaveFile = attr.ib(default=True)
    IgnoreInputs = attr.ib(default=True)


class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
                        AYONPyblishPluginMixin):

    label = "Submit Render to Deadline"
    hosts = ["max"]
    families = ["maxrender"]
    targets = ["local"]
    settings_category = "deadline"

    use_published = True
    priority = 50
    chunk_size = 1
    jobInfo = {}
    pluginInfo = {}
    group = None

    @classmethod
    def apply_settings(cls, project_settings):
        settings = project_settings["deadline"]["publish"]["MaxSubmitDeadline"]  # noqa

        # Take some defaults from settings
        cls.use_published = settings.get("use_published",
                                         cls.use_published)
        cls.priority = settings.get("priority",
                                    cls.priority)
        cls.chunk_size = settings.get("chunk_size", cls.chunk_size)
        cls.group = settings.get("group", cls.group)

    # TODO: multiple camera instance, separate job infos
    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="3dsmax")

        # todo: test whether this works for existing production cases
        #   where custom jobInfo was stored in the project settings
        job_info.update(self.jobInfo)

        instance = self._instance
        context = instance.context
        # Always use the original work file name for the Job name even when
        # rendering is done from the published Work File. The original work
        # file name is clearer because it can also have subversion strings,
        # etc. which are stripped for the published file.

        src_filepath = context.data["currentFile"]
        src_filename = os.path.basename(src_filepath)
        job_info.Name = "%s - %s" % (src_filename, instance.name)
        job_info.BatchName = src_filename
        job_info.Plugin = instance.data["plugin"]
        job_info.UserName = context.data.get("deadlineUser", getpass.getuser())
        job_info.EnableAutoTimeout = True
        # Deadline requires integers in frame range
        frames = "{start}-{end}".format(
            start=int(instance.data["frameStart"]),
            end=int(instance.data["frameEnd"])
        )
        job_info.Frames = frames

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")

        attr_values = self.get_attr_values_from_data(instance.data)

        job_info.ChunkSize = attr_values.get("chunkSize", 1)
        job_info.Comment = context.data.get("comment")
        job_info.Priority = attr_values.get("priority", self.priority)
        job_info.Group = attr_values.get("group", self.group)

        # Add options from RenderGlobals
        render_globals = instance.data.get("renderGlobals", {})
        job_info.update(render_globals)

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_IN_TESTS",
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if not value:
                continue
            job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs
        job_info.add_render_job_env_var()
        job_info.EnvironmentKeyValue["AYON_LOG_NO_COLORS"] = "1"

        # Add list of expected files to job
        # ---------------------------------
        if not instance.data.get("multiCamera"):
            exp = instance.data.get("expectedFiles")
            for filepath in self._iter_expected_files(exp):
                job_info.OutputDirectory += os.path.dirname(filepath)
                job_info.OutputFilename += os.path.basename(filepath)

        return job_info

    def get_plugin_info(self):
        instance = self._instance

        plugin_info = MaxPluginInfo(
            SceneFile=self.scene_path,
            Version=instance.data["maxversion"],
            SaveFile=True,
            IgnoreInputs=True
        )

        plugin_payload = attr.asdict(plugin_info)

        # Patching with pluginInfo from settings
        for key, value in self.pluginInfo.items():
            plugin_payload[key] = value

        return plugin_payload

    def process_submission(self):

        instance = self._instance
        filepath = instance.context.data["currentFile"]

        files = instance.data["expectedFiles"]
        if not files:
            raise KnownPublishError("No Render Elements found!")
        first_file = next(self._iter_expected_files(files))
        output_dir = os.path.dirname(first_file)
        instance.data["outputDir"] = output_dir

        filename = os.path.basename(filepath)

        payload_data = {
            "filename": filename,
            "dirname": output_dir
        }

        self.log.debug("Submitting 3dsMax render..")
        project_settings = instance.context.data["project_settings"]
        auth = self._instance.data["deadline"]["auth"]
        verify = self._instance.data["deadline"]["verify"]
        if instance.data.get("multiCamera"):
            self.log.debug("Submitting jobs for multiple cameras..")
            payload = self._use_published_name_for_multiples(
                payload_data, project_settings)
            job_infos, plugin_infos = payload
            for job_info, plugin_info in zip(job_infos, plugin_infos):
                self.submit(
                    self.assemble_payload(job_info, plugin_info),
                    auth=auth,
                    verify=verify
                )
        else:
            payload = self._use_published_name(payload_data, project_settings)
            job_info, plugin_info = payload
            self.submit(
                self.assemble_payload(job_info, plugin_info),
                auth=auth,
                verify=verify
            )

    def _use_published_name(self, data, project_settings):
        # Not all hosts can import these modules.
        from ayon_max.api.lib import (
            get_current_renderer,
            get_multipass_setting
        )
        from ayon_max.api.lib_rendersettings import RenderSettings

        instance = self._instance
        job_info = copy.deepcopy(self.job_info)
        plugin_info = copy.deepcopy(self.plugin_info)
        plugin_data = {}

        multipass = get_multipass_setting(project_settings)
        if multipass:
            plugin_data["DisableMultipass"] = 0
        else:
            plugin_data["DisableMultipass"] = 1

        files = instance.data.get("expectedFiles")
        if not files:
            raise KnownPublishError("No render elements found")
        first_file = next(self._iter_expected_files(files))
        old_output_dir = os.path.dirname(first_file)
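        # Re-root the beauty output name to the expected-files directory,
        # e.g. ".../work/renders/beauty.0001.exr" ->
        # "<expected files dir>/beauty.0001.exr" (illustrative paths).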
        output_beauty = RenderSettings().get_render_output(instance.name,
                                                           old_output_dir)
        rgb_bname = os.path.basename(output_beauty)
        dir = os.path.dirname(first_file)
        beauty_name = f"{dir}/{rgb_bname}"
        beauty_name = beauty_name.replace("\\", "/")
        plugin_data["RenderOutput"] = beauty_name
        # as 3dsmax has versions with different languages
        plugin_data["Language"] = "ENU"

        renderer_class = get_current_renderer()

        renderer = str(renderer_class).split(":")[0]
        if renderer in [
            "ART_Renderer",
            "Redshift_Renderer",
            "V_Ray_6_Hotfix_3",
            "V_Ray_GPU_6_Hotfix_3",
            "Default_Scanline_Renderer",
            "Quicksilver_Hardware_Renderer",
        ]:
            render_elem_list = RenderSettings().get_render_element()
            for i, element in enumerate(render_elem_list):
                elem_bname = os.path.basename(element)
                new_elem = f"{dir}/{elem_bname}"
                new_elem = new_elem.replace("/", "\\")
                plugin_data["RenderElementOutputFilename%d" % i] = new_elem  # noqa

        if renderer == "Redshift_Renderer":
            plugin_data["redshift_SeparateAovFiles"] = instance.data.get(
                "separateAovFiles")
        if instance.data["cameras"]:
            camera = instance.data["cameras"][0]
            plugin_info["Camera0"] = camera
            plugin_info["Camera"] = camera
            plugin_info["Camera1"] = camera
        self.log.debug("plugin data:{}".format(plugin_data))
        plugin_info.update(plugin_data)

        return job_info, plugin_info

    def get_job_info_through_camera(self, camera):
        """Get the job parameters for Deadline submission when
        multi-camera is enabled.

        Args:
            camera (str): Name of the camera to render through.
        """
        instance = self._instance
        context = instance.context
        job_info = copy.deepcopy(self.job_info)
        exp = instance.data.get("expectedFiles")

        src_filepath = context.data["currentFile"]
        src_filename = os.path.basename(src_filepath)
        job_info.Name = "%s - %s - %s" % (
            src_filename, instance.name, camera)
        # set the output filepath with the relative camera
        for filepath in self._iter_expected_files(exp):
            if camera not in filepath:
                continue
            job_info.OutputDirectory += os.path.dirname(filepath)
            job_info.OutputFilename += os.path.basename(filepath)

        return job_info

    def get_plugin_info_through_camera(self, camera):
        """Get the plugin parameters for Deadline submission when
        multi-camera is enabled.

        Args:
            camera (str): Name of the camera to render through.
        """
        instance = self._instance
        # set the target camera
        plugin_info = copy.deepcopy(self.plugin_info)

        plugin_data = {}
        # set the output filepath with the relative camera
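        # e.g. "shot_v001.max" rendered through camera "camMain" resolves to
        # "<scene dir>/_shot_v001/shot_v001_camMain.max" (illustrative names)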
        if instance.data.get("multiCamera"):
            scene_filepath = instance.context.data["currentFile"]
            scene_filename = os.path.basename(scene_filepath)
            scene_directory = os.path.dirname(scene_filepath)
            current_filename, ext = os.path.splitext(scene_filename)
            camera_scene_name = f"{current_filename}_{camera}{ext}"
            camera_scene_filepath = os.path.join(
                scene_directory, f"_{current_filename}", camera_scene_name)
            plugin_data["SceneFile"] = camera_scene_filepath

        files = instance.data.get("expectedFiles")
        if not files:
            raise KnownPublishError("No render elements found")
        first_file = next(self._iter_expected_files(files))
        old_output_dir = os.path.dirname(first_file)
        rgb_output = RenderSettings().get_batch_render_output(camera)  # noqa
        rgb_bname = os.path.basename(rgb_output)
        dir = os.path.dirname(first_file)
        beauty_name = f"{dir}/{rgb_bname}"
        beauty_name = beauty_name.replace("\\", "/")
        plugin_info["RenderOutput"] = beauty_name
        renderer_class = get_current_renderer()

        renderer = str(renderer_class).split(":")[0]
        if renderer in [
            "ART_Renderer",
            "Redshift_Renderer",
            "V_Ray_6_Hotfix_3",
            "V_Ray_GPU_6_Hotfix_3",
            "Default_Scanline_Renderer",
            "Quicksilver_Hardware_Renderer",
        ]:
            render_elem_list = RenderSettings().get_batch_render_elements(
                instance.name, old_output_dir, camera
            )
            for i, element in enumerate(render_elem_list):
                if camera in element:
                    elem_bname = os.path.basename(element)
                    new_elem = f"{dir}/{elem_bname}"
                    new_elem = new_elem.replace("/", "\\")
                    plugin_info["RenderElementOutputFilename%d" % i] = new_elem  # noqa

        if camera:
            # set the default camera and target camera
            # (weird parameters from max)
            plugin_data["Camera"] = camera
            plugin_data["Camera1"] = camera
            plugin_data["Camera0"] = None

        plugin_info.update(plugin_data)
        return plugin_info

    def _use_published_name_for_multiples(self, data, project_settings):
        """Process the submission parameters for Deadline when the
        user enables the multi-camera option.

        Returns:
            tuple[list, list]: Lists of job infos and plugin infos,
                one pair per camera.
        """
        job_info_list = []
        plugin_info_list = []
        instance = self._instance
        cameras = instance.data.get("cameras", [])
        plugin_data = {}
        multipass = get_multipass_setting(project_settings)
        if multipass:
            plugin_data["DisableMultipass"] = 0
        else:
            plugin_data["DisableMultipass"] = 1
        for cam in cameras:
            job_info = self.get_job_info_through_camera(cam)
            plugin_info = self.get_plugin_info_through_camera(cam)
            plugin_info.update(plugin_data)
            job_info_list.append(job_info)
            plugin_info_list.append(plugin_info)

        return job_info_list, plugin_info_list

    def from_published_scene(self, replace_in_path=True):
        instance = self._instance
        if instance.data["renderer"] == "Redshift_Renderer":
            self.log.debug("Using Redshift... published scene won't be used.")
            replace_in_path = False
        return replace_with_published_scene_path(
            instance, replace_in_path)

    @staticmethod
    def _iter_expected_files(exp):
        if isinstance(exp[0], dict):
            for _aov, files in exp[0].items():
                for file in files:
                    yield file
        else:
            for file in exp:
                yield file

    @classmethod
    def get_attribute_defs(cls):
        defs = super(MaxSubmitDeadline, cls).get_attribute_defs()
        defs.extend([
            BoolDef("use_published",
                    default=cls.use_published,
                    label="Use Published Scene"),

            NumberDef("priority",
                      minimum=1,
                      maximum=250,
                      decimals=0,
                      default=cls.priority,
                      label="Priority"),

            NumberDef("chunkSize",
                      minimum=1,
                      maximum=50,
                      decimals=0,
                      default=cls.chunk_size,
                      label="Frames Per Task"),

            TextDef("group",
                    default=cls.group,
                    label="Group Name"),
        ])

        return defs
@@ -1,935 +0,0 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline.

This module takes care of submitting a job from Maya to Deadline. It
creates the job and sets the correct environment. Its behavior is controlled
by the ``DEADLINE_REST_URL`` environment variable - pointing to the Deadline
Web Service - and the :data:`MayaSubmitDeadline.use_published` property
telling Deadline whether to use the published scene workfile or not.

If ``vrscene`` or ``assscene`` are detected in families, it will first
submit a job to export these files and then a dependent job to render them.

Attributes:
    payload_skeleton (dict): Skeleton payload data sent as job to Deadline.
        Default values are for ``MayaBatch`` plugin.

"""

from __future__ import print_function
import os
import json
import getpass
import copy
import re
import hashlib
from datetime import datetime
import itertools
from collections import OrderedDict

import attr

from ayon_core.pipeline import (
    AYONPyblishPluginMixin
)
from ayon_core.lib import (
    BoolDef,
    NumberDef,
    TextDef,
    EnumDef,
    is_in_tests,
)
from ayon_maya.api.lib_rendersettings import RenderSettings
from ayon_maya.api.lib import get_attr_in_layer

from ayon_core.pipeline.farm.tools import iter_expected_files

from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


def _validate_deadline_bool_value(instance, attribute, value):
    if not isinstance(value, (str, bool)):
        raise TypeError(
            "Attribute {} must be str or bool.".format(attribute))
    if value not in {"1", "0", True, False}:
        raise ValueError(
            ("Value of {} must be one of "
             "'0', '1', True, False").format(attribute)
        )


@attr.s
class MayaPluginInfo(object):
    SceneFile = attr.ib(default=None)  # Input
    OutputFilePath = attr.ib(default=None)  # Output directory and filename
    OutputFilePrefix = attr.ib(default=None)
    Version = attr.ib(default=None)  # Mandatory for Deadline
    UsingRenderLayers = attr.ib(default=True)
    RenderLayer = attr.ib(default=None)  # Render only this layer
    Renderer = attr.ib(default=None)
    ProjectPath = attr.ib(default=None)  # Resolve relative references
    # Include all lights flag
    RenderSetupIncludeLights = attr.ib(
        default="1", validator=_validate_deadline_bool_value)
    StrictErrorChecking = attr.ib(default=True)


@attr.s
class PythonPluginInfo(object):
    ScriptFile = attr.ib()
    Version = attr.ib(default="3.6")
    Arguments = attr.ib(default=None)
    SingleFrameOnly = attr.ib(default=None)


@attr.s
class VRayPluginInfo(object):
    InputFilename = attr.ib(default=None)  # Input
    SeparateFilesPerFrame = attr.ib(default=None)
    VRayEngine = attr.ib(default="V-Ray")
    Width = attr.ib(default=None)
    Height = attr.ib(default=None)  # Mandatory for Deadline
    OutputFilePath = attr.ib(default=True)
    OutputFileName = attr.ib(default=None)  # Render only this layer


@attr.s
class ArnoldPluginInfo(object):
    ArnoldFile = attr.ib(default=None)


class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
                         AYONPyblishPluginMixin):

    label = "Submit Render to Deadline"
    hosts = ["maya"]
    families = ["renderlayer"]
    targets = ["local"]
    settings_category = "deadline"

    tile_assembler_plugin = "OpenPypeTileAssembler"
    priority = 50
    tile_priority = 50
    limit = []  # limit groups
    jobInfo = {}
    pluginInfo = {}
    group = "none"
    strict_error_checking = True

    @classmethod
    def apply_settings(cls, project_settings):
        settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"]  # noqa

        # Take some defaults from settings
        cls.asset_dependencies = settings.get("asset_dependencies",
                                              cls.asset_dependencies)
        cls.import_reference = settings.get("import_reference",
                                            cls.import_reference)
        cls.use_published = settings.get("use_published", cls.use_published)
        cls.priority = settings.get("priority", cls.priority)
        cls.tile_priority = settings.get("tile_priority", cls.tile_priority)
        cls.limit = settings.get("limit", cls.limit)
        cls.group = settings.get("group", cls.group)
        cls.strict_error_checking = settings.get("strict_error_checking",
                                                 cls.strict_error_checking)
        job_info = settings.get("jobInfo")
        if job_info:
            job_info = json.loads(job_info)
        plugin_info = settings.get("pluginInfo")
        if plugin_info:
            plugin_info = json.loads(plugin_info)

        cls.jobInfo = job_info or cls.jobInfo
        cls.pluginInfo = plugin_info or cls.pluginInfo

    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="MayaBatch")

        # todo: test whether this works for existing production cases
        #   where custom jobInfo was stored in the project settings
        job_info.update(self.jobInfo)

        instance = self._instance
        context = instance.context

        # Always use the original work file name for the Job name even when
        # rendering is done from the published Work File. The original work
        # file name is clearer because it can also have subversion strings,
        # etc. which are stripped for the published file.
        src_filepath = context.data["currentFile"]
        src_filename = os.path.basename(src_filepath)

        if is_in_tests():
            src_filename += datetime.now().strftime("%d%m%Y%H%M%S")

        job_info.Name = "%s - %s" % (src_filename, instance.name)
        job_info.BatchName = src_filename
        job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch")
        job_info.UserName = context.data.get("deadlineUser", getpass.getuser())

        # Deadline requires integers in frame range
        frames = "{start}-{end}x{step}".format(
            start=int(instance.data["frameStartHandle"]),
            end=int(instance.data["frameEndHandle"]),
            step=int(instance.data["byFrameStep"]),
        )
        job_info.Frames = frames

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")
        job_info.Comment = context.data.get("comment")
        job_info.Priority = instance.data.get("priority", self.priority)

        if self.group != "none" and self.group:
            job_info.Group = self.group

        if self.limit:
            job_info.LimitGroups = ",".join(self.limit)

        attr_values = self.get_attr_values_from_data(instance.data)
        render_globals = instance.data.setdefault("renderGlobals", dict())
        machine_list = attr_values.get("machineList", "")
        if machine_list:
            if attr_values.get("whitelist", True):
                machine_list_key = "Whitelist"
            else:
                machine_list_key = "Blacklist"
            render_globals[machine_list_key] = machine_list

        job_info.Priority = attr_values.get("priority")
        job_info.ChunkSize = attr_values.get("chunkSize")

        # Add options from RenderGlobals
        render_globals = instance.data.get("renderGlobals", {})
        job_info.update(render_globals)

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_IN_TESTS",
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if not value:
                continue
            job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs
        job_info.add_render_job_env_var()
        job_info.EnvironmentKeyValue["AYON_LOG_NO_COLORS"] = "1"

        # Adding file dependencies.
        if not is_in_tests() and self.asset_dependencies:
            dependencies = instance.context.data["fileDependencies"]
            for dependency in dependencies:
                job_info.AssetDependency += dependency

        # Add list of expected files to job
        # ---------------------------------
        exp = instance.data.get("expectedFiles")
        for filepath in iter_expected_files(exp):
            job_info.OutputDirectory += os.path.dirname(filepath)
            job_info.OutputFilename += os.path.basename(filepath)

        return job_info

    def get_plugin_info(self):
        # Not all hosts can import this module.
        from maya import cmds

        instance = self._instance
        context = instance.context

        # Set it to default Maya behaviour if it cannot be determined
        # from instance (but it should be, by the Collector).

        default_rs_include_lights = (
            instance.context.data['project_settings']
            ['maya']
            ['render_settings']
            ['enable_all_lights']
        )

        rs_include_lights = instance.data.get(
            "renderSetupIncludeLights", default_rs_include_lights)
        if rs_include_lights not in {"1", "0", True, False}:
            rs_include_lights = default_rs_include_lights

        attr_values = self.get_attr_values_from_data(instance.data)
        strict_error_checking = attr_values.get("strict_error_checking",
                                                self.strict_error_checking)
        plugin_info = MayaPluginInfo(
            SceneFile=self.scene_path,
            Version=cmds.about(version=True),
            RenderLayer=instance.data['setMembers'],
            Renderer=instance.data["renderer"],
            RenderSetupIncludeLights=rs_include_lights,  # noqa
            ProjectPath=context.data["workspaceDir"],
            UsingRenderLayers=True,
            StrictErrorChecking=strict_error_checking
        )

        plugin_payload = attr.asdict(plugin_info)

        # Patching with pluginInfo from settings
        for key, value in self.pluginInfo.items():
            plugin_payload[key] = value

        return plugin_payload

    def process_submission(self):
        from maya import cmds
        instance = self._instance

        filepath = self.scene_path  # publish if `use_publish` else workfile

        # TODO: Avoid the need for this logic here, needed for submit publish
        # Store output dir for unified publisher (filesequence)
        expected_files = instance.data["expectedFiles"]
        first_file = next(iter_expected_files(expected_files))
        output_dir = os.path.dirname(first_file)
        instance.data["outputDir"] = output_dir

        # Patch workfile (only when use_published is enabled)
        if self.use_published:
            self._patch_workfile()

        # Gather needed data ------------------------------------------------
        filename = os.path.basename(filepath)
        dirname = os.path.join(
            cmds.workspace(query=True, rootDirectory=True),
            cmds.workspace(fileRuleEntry="images")
        )

        # Fill in common data to payload ------------------------------------
        # TODO: Replace these with collected data from CollectRender
        payload_data = {
            "filename": filename,
            "dirname": dirname,
        }

        # Submit preceding export jobs -------------------------------------
        export_job = None
        assert not all(x in instance.data["families"]
                       for x in ['vrayscene', 'assscene']), (
            "Vray Scene and Ass Scene options are mutually exclusive")

        auth = self._instance.data["deadline"]["auth"]
        verify = self._instance.data["deadline"]["verify"]
        if "vrayscene" in instance.data["families"]:
            self.log.debug("Submitting V-Ray scene render..")
            vray_export_payload = self._get_vray_export_payload(payload_data)
            export_job = self.submit(vray_export_payload,
                                     auth=auth,
                                     verify=verify)

            payload = self._get_vray_render_payload(payload_data)

        else:
            self.log.debug("Submitting MayaBatch render..")
            payload = self._get_maya_payload(payload_data)

        # Add export job as dependency --------------------------------------
        if export_job:
            job_info, _ = payload
            job_info.JobDependencies = export_job

        if instance.data.get("tileRendering"):
            # Prepare tiles data
            self._tile_render(payload)
        else:
            # Submit main render job
            job_info, plugin_info = payload
            self.submit(self.assemble_payload(job_info, plugin_info),
                        auth=auth,
                        verify=verify)

    def _tile_render(self, payload):
        """Submit as tile render per frame with dependent assembly jobs."""

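        # Flow: per frame, submit one tile render job covering all tiles,
        # then an assembly job that depends on it and stitches the tiles
        # together using a generated config file shipped as an aux file.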
        # As collected by super process()
        instance = self._instance

        payload_job_info, payload_plugin_info = payload
        job_info = copy.deepcopy(payload_job_info)
        plugin_info = copy.deepcopy(payload_plugin_info)

        # Force plugin reload for vray because the region does not get
        # flushed between tile renders.
        if plugin_info["Renderer"] == "vray":
            job_info.ForceReloadPlugin = True

        # if we have a sequence of files, we need to create a tile job for
        # every frame
        job_info.TileJob = True
        job_info.TileJobTilesInX = instance.data.get("tilesX")
        job_info.TileJobTilesInY = instance.data.get("tilesY")

        tiles_count = job_info.TileJobTilesInX * job_info.TileJobTilesInY

        plugin_info["ImageHeight"] = instance.data.get("resolutionHeight")
        plugin_info["ImageWidth"] = instance.data.get("resolutionWidth")
        plugin_info["RegionRendering"] = True

        R_FRAME_NUMBER = re.compile(
            r".+\.(?P<frame>[0-9]+)\..+")  # noqa: N806, E501
        REPL_FRAME_NUMBER = re.compile(
            r"(.+\.)([0-9]+)(\..+)")  # noqa: N806, E501

        exp = instance.data["expectedFiles"]
        if isinstance(exp[0], dict):
            # we have aovs and we need to iterate over them
            # get files from `beauty`
            files = exp[0].get("beauty")
            # assembly files are used for assembly jobs as we need to put
            # together all AOVs
            assembly_files = list(
                itertools.chain.from_iterable(
                    [f for _, f in exp[0].items()]))
            if not files:
                # if beauty doesn't exist, use the first aov we found
                files = exp[0].get(list(exp[0].keys())[0])
        else:
            files = exp
            assembly_files = files

        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]

        # Define frame tile jobs
        frame_file_hash = {}
        frame_payloads = {}
        file_index = 1
        for file in files:
            frame = re.search(R_FRAME_NUMBER, file).group("frame")

            new_job_info = copy.deepcopy(job_info)
            new_job_info.Name += " (Frame {} - {} tiles)".format(frame,
                                                                 tiles_count)
            new_job_info.TileJobFrame = frame

            new_plugin_info = copy.deepcopy(plugin_info)

            # Add tile data into job info and plugin info
            tiles_data = _format_tiles(
                file, 0,
                instance.data.get("tilesX"),
                instance.data.get("tilesY"),
                instance.data.get("resolutionWidth"),
                instance.data.get("resolutionHeight"),
                payload_plugin_info["OutputFilePrefix"]
            )[0]

            new_job_info.update(tiles_data["JobInfo"])
            new_plugin_info.update(tiles_data["PluginInfo"])

            self.log.debug("hashing {} - {}".format(file_index, file))
            job_hash = hashlib.sha256(
                ("{}_{}".format(file_index, file)).encode("utf-8"))

            file_hash = job_hash.hexdigest()
            frame_file_hash[frame] = file_hash

            new_job_info.ExtraInfo[0] = file_hash
            new_job_info.ExtraInfo[1] = file

            frame_payloads[frame] = self.assemble_payload(
                job_info=new_job_info,
                plugin_info=new_plugin_info
            )
            file_index += 1

        self.log.debug(
            "Submitting tile job(s) [{}] ...".format(len(frame_payloads)))

        # Submit frame tile jobs
        frame_tile_job_id = {}
        for frame, tile_job_payload in frame_payloads.items():
            job_id = self.submit(
                tile_job_payload, auth, verify)
            frame_tile_job_id[frame] = job_id

        # Define assembly payloads
        assembly_job_info = copy.deepcopy(job_info)
        assembly_job_info.Plugin = self.tile_assembler_plugin
        assembly_job_info.Name += " - Tile Assembly Job"
        assembly_job_info.Frames = 1
        assembly_job_info.MachineLimit = 1

        attr_values = self.get_attr_values_from_data(instance.data)
        assembly_job_info.Priority = attr_values.get("tile_priority",
                                                     self.tile_priority)
        assembly_job_info.TileJob = False

        # TODO: This should be a new publisher attribute definition
        pool = instance.context.data["project_settings"]["deadline"]
        pool = pool["publish"]["ProcessSubmittedJobOnFarm"]["deadline_pool"]
        assembly_job_info.Pool = pool or instance.data.get("primaryPool", "")

        assembly_plugin_info = {
            "CleanupTiles": 1,
            "ErrorOnMissing": True,
            "Renderer": self._instance.data["renderer"]
        }

        assembly_payloads = []
        output_dir = self.job_info.OutputDirectory[0]
        config_files = []
        for file in assembly_files:
            frame = re.search(R_FRAME_NUMBER, file).group("frame")

            frame_assembly_job_info = copy.deepcopy(assembly_job_info)
            frame_assembly_job_info.Name += " (Frame {})".format(frame)
            frame_assembly_job_info.OutputFilename[0] = re.sub(
                REPL_FRAME_NUMBER,
                "\\1{}\\3".format("#" * len(frame)), file)

            file_hash = frame_file_hash[frame]
            tile_job_id = frame_tile_job_id[frame]

            frame_assembly_job_info.ExtraInfo[0] = file_hash
            frame_assembly_job_info.ExtraInfo[1] = file
            frame_assembly_job_info.JobDependencies = tile_job_id
            frame_assembly_job_info.Frames = frame

            # write assembly job config files
            config_file = os.path.join(
                output_dir,
                "{}_config_{}.txt".format(
                    os.path.splitext(file)[0],
                    datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
                )
            )
            config_files.append(config_file)
            try:
                if not os.path.isdir(output_dir):
                    os.makedirs(output_dir)
            except OSError:
                # directory is not available
                self.log.warning("Path is unreachable: "
                                 "`{}`".format(output_dir))

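            # Resulting config file (illustrative): TileCount, ImageFileName,
            # ImageWidth and ImageHeight lines, followed by the per-tile
            # entries produced by _format_tiles below.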
with open(config_file, "w") as cf:
|
||||
print("TileCount={}".format(tiles_count), file=cf)
|
||||
print("ImageFileName={}".format(file), file=cf)
|
||||
print("ImageWidth={}".format(
|
||||
instance.data.get("resolutionWidth")), file=cf)
|
||||
print("ImageHeight={}".format(
|
||||
instance.data.get("resolutionHeight")), file=cf)
|
||||
|
||||
reversed_y = False
|
||||
if plugin_info["Renderer"] == "arnold":
|
||||
reversed_y = True
|
||||
|
||||
with open(config_file, "a") as cf:
|
||||
# Need to reverse the order of the y tiles, because image
|
||||
# coordinates are calculated from bottom left corner.
|
||||
tiles = _format_tiles(
|
||||
file, 0,
|
||||
instance.data.get("tilesX"),
|
||||
instance.data.get("tilesY"),
|
||||
instance.data.get("resolutionWidth"),
|
||||
instance.data.get("resolutionHeight"),
|
||||
payload_plugin_info["OutputFilePrefix"],
|
||||
reversed_y=reversed_y
|
||||
)[1]
|
||||
for k, v in sorted(tiles.items()):
|
||||
print("{}={}".format(k, v), file=cf)
|
||||
|
||||
assembly_payloads.append(
|
||||
self.assemble_payload(
|
||||
job_info=frame_assembly_job_info,
|
||||
plugin_info=assembly_plugin_info.copy(),
|
||||
# This would fail if the client machine and webserice are
|
||||
# using different storage paths.
|
||||
aux_files=[config_file]
|
||||
)
|
||||
)
|
||||
|
||||
# Submit assembly jobs
|
||||
assembly_job_ids = []
|
||||
num_assemblies = len(assembly_payloads)
|
||||
for i, payload in enumerate(assembly_payloads):
|
||||
self.log.debug(
|
||||
"submitting assembly job {} of {}".format(i + 1,
|
||||
num_assemblies)
|
||||
)
|
||||
assembly_job_id = self.submit(
|
||||
payload,
|
||||
auth=auth,
|
||||
verify=verify
|
||||
)
|
||||
assembly_job_ids.append(assembly_job_id)
|
||||
|
||||
instance.data["assemblySubmissionJobs"] = assembly_job_ids
|
||||
|
||||
# Remove config files to avoid confusion about where data is coming
|
||||
# from in Deadline.
|
||||
for config_file in config_files:
|
||||
os.remove(config_file)
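        # For reference, a config file generated above for a 2x2 tiling of a
        # 1920x1080 frame would contain roughly (file names illustrative):
        #     TileCount=4
        #     ImageFileName=renders/beauty.1001.exr
        #     ImageWidth=1920
        #     ImageHeight=1080
        #     Tile0FileName=renders/_tile_1x1_2x2_beauty.1001.exr
        #     Tile0X=0
        #     Tile0Y=540
        #     Tile0Width=960
        #     Tile0Height=540
        #     ...plus TilesCropped=False and the Tile1-Tile3 entries.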

    def _get_maya_payload(self, data):

        job_info = copy.deepcopy(self.job_info)

        if not is_in_tests() and self.asset_dependencies:
            # Asset dependency to wait for at least the scene file to sync.
            job_info.AssetDependency += self.scene_path

        # Get layer prefix
        renderlayer = self._instance.data["setMembers"]
        renderer = self._instance.data["renderer"]
        layer_prefix_attr = RenderSettings.get_image_prefix_attr(renderer)
        layer_prefix = get_attr_in_layer(layer_prefix_attr, layer=renderlayer)

        plugin_info = copy.deepcopy(self.plugin_info)
        plugin_info.update({
            # Output directory and filename
            "OutputFilePath": data["dirname"].replace("\\", "/"),
            "OutputFilePrefix": layer_prefix,
        })

        # This hack is here because of how Deadline handles Renderman version.
        # It considers everything with `renderman` set as a version older than
        # Renderman 22, and so if we are using renderman > 21 we need to set
        # the renderer string on the job to `renderman22`. We will have to
        # change this when Deadline releases a new version handling this.
        renderer = self._instance.data["renderer"]
        if renderer == "renderman":
            try:
                from rfm2.config import cfg  # noqa
            except ImportError:
                raise Exception("Cannot determine renderman version")

            rman_version = cfg().build_info.version()  # type: str
            if int(rman_version.split(".")[0]) > 22:
                renderer = "renderman22"

            plugin_info["Renderer"] = renderer

            # this is needed because renderman plugin in Deadline
            # handles directory and file prefixes separately
            plugin_info["OutputFilePath"] = job_info.OutputDirectory[0]

        return job_info, plugin_info

    def _get_vray_export_payload(self, data):

        job_info = copy.deepcopy(self.job_info)
        job_info.Name = self._job_info_label("Export")

        # Get V-Ray settings info to compute output path
        vray_scene = self.format_vray_output_filename()

        # Plain dict, so it must not be passed through `attr.asdict`
        plugin_info = {
            "Renderer": "vray",
            "SkipExistingFrames": True,
            "UseLegacyRenderLayers": True,
            "OutputFilePath": os.path.dirname(vray_scene)
        }

        return job_info, plugin_info

    def _get_vray_render_payload(self, data):

        # Job Info
        job_info = copy.deepcopy(self.job_info)
        job_info.Name = self._job_info_label("Render")
        job_info.Plugin = "Vray"
        job_info.OverrideTaskExtraInfoNames = False

        # Plugin Info
        plugin_info = VRayPluginInfo(
            InputFilename=self.format_vray_output_filename(),
            SeparateFilesPerFrame=False,
            VRayEngine="V-Ray",
            Width=self._instance.data["resolutionWidth"],
            Height=self._instance.data["resolutionHeight"],
            OutputFilePath=job_info.OutputDirectory[0],
            OutputFileName=job_info.OutputFilename[0]
        )

        return job_info, attr.asdict(plugin_info)

    def _get_arnold_render_payload(self, data):
        # Job Info
        job_info = copy.deepcopy(self.job_info)
        job_info.Name = self._job_info_label("Render")
        job_info.Plugin = "Arnold"
        job_info.OverrideTaskExtraInfoNames = False

        # Plugin Info
        ass_file, _ = os.path.splitext(data["output_filename_0"])
        ass_filepath = ass_file + ".ass"

        plugin_info = ArnoldPluginInfo(
            ArnoldFile=ass_filepath
        )

        return job_info, attr.asdict(plugin_info)

    def format_vray_output_filename(self):
        """Format the expected output file of the Export job.

        Example:
            <Scene>/<Scene>_<Layer>/<Layer>
            "shot010_v006/shot010_v006_CHARS/CHARS_0001.vrscene"

        Returns:
            str

        """
        from maya import cmds
        # "vrayscene/<Scene>/<Scene>_<Layer>/<Layer>"
        vray_settings = cmds.ls(type="VRaySettingsNode")
        node = vray_settings[0]
        template = cmds.getAttr("{}.vrscene_filename".format(node))
        scene, _ = os.path.splitext(self.scene_path)

        def smart_replace(string, key_values):
            new_string = string
            for key, value in key_values.items():
                new_string = new_string.replace(key, value)
            return new_string

        # Get workfile scene path without extension to format vrscene_filename
        scene_filename = os.path.basename(self.scene_path)
        scene_filename_no_ext, _ = os.path.splitext(scene_filename)

        layer = self._instance.data['setMembers']

        # Reformat without tokens
        output_path = smart_replace(
            template,
            {"<Scene>": scene_filename_no_ext,
             "<Layer>": layer})

        start_frame = int(self._instance.data["frameStartHandle"])
        workspace = self._instance.context.data["workspace"]
        filename_zero = "{}_{:04d}.vrscene".format(output_path, start_frame)
        filepath_zero = os.path.join(workspace, filename_zero)

        return filepath_zero.replace("\\", "/")

    def _patch_workfile(self):
        """Patch Maya scene.

        This will take a list of patches (lines to add) and apply them to
        the *published* Maya scene file (that is used later for rendering).

        Patches are dicts with the following structure::
            {
                "name": "Name of patch",
                "regex": "regex of line before patch",
                "line": "line to insert"
            }

        """
        project_settings = self._instance.context.data["project_settings"]
        patches = (
            project_settings.get(
                "deadline", {}).get(
                "publish", {}).get(
                "MayaSubmitDeadline", {}).get(
                "scene_patches", {})
        )
        if not patches:
            return

        if os.path.splitext(self.scene_path)[1].lower() != ".ma":
            self.log.debug("Skipping workfile patch since workfile is not "
                           ".ma file")
            return

        compiled_regex = [re.compile(p["regex"]) for p in patches]
        with open(self.scene_path, "r+") as pf:
            scene_data = pf.readlines()
            for ln, line in enumerate(scene_data):
                for i, r in enumerate(compiled_regex):
                    if re.match(r, line):
                        scene_data.insert(ln + 1, patches[i]["line"])
                        pf.seek(0)
                        pf.writelines(scene_data)
                        pf.truncate()
                        self.log.info("Applied {} patch to scene.".format(
                            patches[i]["name"]
                        ))
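
    # A scene patch entry, as it could appear in the settings, might look
    # like this (name and values are hypothetical):
    #     {
    #         "name": "Force RenderMan plugin requirement",
    #         "regex": "^requires maya .*",
    #         "line": "requires \"RenderMan_for_Maya\" \"24.0\";\n"
    #     }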

    def _job_info_label(self, label):
        return "{label} {job.Name} [{start}-{end}]".format(
            label=label,
            job=self.job_info,
            start=int(self._instance.data["frameStartHandle"]),
            end=int(self._instance.data["frameEndHandle"]),
        )

    @classmethod
    def get_attribute_defs(cls):
        defs = super(MayaSubmitDeadline, cls).get_attribute_defs()

        defs.extend([
            NumberDef("priority",
                      label="Priority",
                      default=cls.default_priority,
                      decimals=0),
            NumberDef("chunkSize",
                      label="Frames Per Task",
                      default=1,
                      decimals=0,
                      minimum=1,
                      maximum=1000),
            TextDef("machineList",
                    label="Machine List",
                    default="",
                    placeholder="machine1,machine2"),
            EnumDef("whitelist",
                    label="Machine List (Allow/Deny)",
                    items={
                        True: "Allow List",
                        False: "Deny List",
                    },
                    default=False),
            NumberDef("tile_priority",
                      label="Tile Assembler Priority",
                      decimals=0,
                      default=cls.tile_priority),
            BoolDef("strict_error_checking",
                    label="Strict Error Checking",
                    default=cls.strict_error_checking),
        ])

        return defs


def _format_tiles(
        filename,
        index,
        tiles_x,
        tiles_y,
        width,
        height,
        prefix,
        reversed_y=False
):
    """Generate tile entries for Deadline tile job.

    Returns two dictionaries - one that can be directly used in the Deadline
    job, a second that can be used for the Deadline Assembly job
    configuration file.

    This will format tile names:

    Example::
        {
            "OutputFilename0Tile0": "_tile_1x1_4x4_Main_beauty.1001.exr",
            "OutputFilename0Tile1": "_tile_2x1_4x4_Main_beauty.1001.exr"
        }

    And add tile prefixes like:

    Example::
        Image prefix is:
        `<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>`

        Result for tile 0 for 4x4 will be:
        `<Scene>/<RenderLayer>/_tile_1x1_4x4_<RenderLayer>_<RenderPass>`

    Calculating coordinates is tricky as in the Job they are defined as top,
    left, bottom, right with zero being in the top-left corner. But the
    Assembler configuration file takes tile coordinates as X, Y, Width and
    Height and zero is the bottom left corner.

    Args:
        filename (str): Filename to process as tiles.
        index (int): Index of that file if it is sequence.
        tiles_x (int): Number of tiles in X.
        tiles_y (int): Number of tiles in Y.
        width (int): Width resolution of final image.
        height (int): Height resolution of final image.
        prefix (str): Image prefix.
        reversed_y (bool): Reverses the order of the y tiles.

    Returns:
        (dict, dict): Tuple of two dictionaries - first can be used to
            extend JobInfo, second has tiles x, y, width and height
            used for assembler configuration.

    """
    # Math used requires integers for correct output - as such
    # we ensure our inputs are correct.
    assert isinstance(tiles_x, int), "tiles_x must be an integer"
    assert isinstance(tiles_y, int), "tiles_y must be an integer"
    assert isinstance(width, int), "width must be an integer"
    assert isinstance(height, int), "height must be an integer"

    out = {"JobInfo": {}, "PluginInfo": {}}
    cfg = OrderedDict()
    w_space = width // tiles_x
    h_space = height // tiles_y

    cfg["TilesCropped"] = "False"

    tile = 0
    range_y = range(1, tiles_y + 1)
    reversed_y_range = list(reversed(range_y))
    for tile_x in range(1, tiles_x + 1):
        for i, tile_y in enumerate(range_y):
            tile_y_index = tile_y
            if reversed_y:
                tile_y_index = reversed_y_range[i]

            tile_prefix = "_tile_{}x{}_{}x{}_".format(
                tile_x, tile_y_index, tiles_x, tiles_y
            )

            new_filename = "{}/{}{}".format(
                os.path.dirname(filename),
                tile_prefix,
                os.path.basename(filename)
            )

            top = height - (tile_y * h_space)
            bottom = height - ((tile_y - 1) * h_space) - 1
            left = (tile_x - 1) * w_space
            right = (tile_x * w_space) - 1

            # Job info
            key = "OutputFilename{}".format(index)
            out["JobInfo"][key] = new_filename

            # Plugin Info
            key = "RegionPrefix{}".format(str(tile))
            out["PluginInfo"][key] = "/{}".format(
                tile_prefix
            ).join(prefix.rsplit("/", 1))
            out["PluginInfo"]["RegionTop{}".format(tile)] = top
            out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom
            out["PluginInfo"]["RegionLeft{}".format(tile)] = left
            out["PluginInfo"]["RegionRight{}".format(tile)] = right

            # Tile config
            cfg["Tile{}FileName".format(tile)] = new_filename
            cfg["Tile{}X".format(tile)] = left
            cfg["Tile{}Y".format(tile)] = top
            cfg["Tile{}Width".format(tile)] = w_space
            cfg["Tile{}Height".format(tile)] = h_space

            tile += 1

    return out, cfg
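

# A minimal sketch of the helper's output, assuming a 2x2 tiling of a
# 1920x1080 frame (file name is illustrative):
#
#     out, cfg = _format_tiles(
#         "renders/beauty.1001.exr", 0, 2, 2, 1920, 1080,
#         "<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>")
#
# cfg["Tile0FileName"] == "renders/_tile_1x1_2x2_beauty.1001.exr"
# cfg["Tile0X"], cfg["Tile0Y"] == 0, 540
# cfg["Tile0Width"], cfg["Tile0Height"] == 960, 540
# and out["PluginInfo"] carries the matching RegionTop0/Bottom0/Left0/Right0
# values (540, 1079, 0, 959).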
@@ -1,558 +0,0 @@
import os
import re
import json
import getpass
from datetime import datetime

import pyblish.api

from ayon_core.pipeline.publish import (
    AYONPyblishPluginMixin
)
from ayon_core.lib import (
    is_in_tests,
    BoolDef,
    NumberDef
)
from ayon_deadline.abstract_submit_deadline import requests_post


class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                         AYONPyblishPluginMixin):
    """Submit write to Deadline

    Renders are submitted to a Deadline Web Service as
    supplied via settings key "DEADLINE_REST_URL".

    """

    label = "Submit Nuke to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["nuke"]
    families = ["render", "prerender"]
    optional = True
    targets = ["local"]
    settings_category = "deadline"

    # presets
    priority = 50
    chunk_size = 1
    concurrent_tasks = 1
    group = ""
    department = ""
    limit_groups = []
    use_gpu = False
    env_allowed_keys = []
    env_search_replace_values = []
    workfile_dependency = True
    use_published_workfile = True

    @classmethod
    def get_attribute_defs(cls):
        return [
            NumberDef(
                "priority",
                label="Priority",
                default=cls.priority,
                decimals=0
            ),
            NumberDef(
                "chunk",
                label="Frames Per Task",
                default=cls.chunk_size,
                decimals=0,
                minimum=1,
                maximum=1000
            ),
            NumberDef(
                "concurrency",
                label="Concurrency",
                default=cls.concurrent_tasks,
                decimals=0,
                minimum=1,
                maximum=10
            ),
            BoolDef(
                "use_gpu",
                default=cls.use_gpu,
                label="Use GPU"
            ),
            BoolDef(
                "workfile_dependency",
                default=cls.workfile_dependency,
                label="Workfile Dependency"
            ),
            BoolDef(
                "use_published_workfile",
                default=cls.use_published_workfile,
                label="Use Published Workfile"
            )
        ]

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return
        instance.data["attributeValues"] = self.get_attr_values_from_data(
            instance.data)

        families = instance.data["families"]

        node = instance.data["transientData"]["node"]
        context = instance.context

        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        self.deadline_url = "{}/api/jobs".format(deadline_url)
        self._comment = context.data.get("comment", "")
        self._ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))
        self._deadline_user = context.data.get(
            "deadlineUser", getpass.getuser())
        submit_frame_start = int(instance.data["frameStartHandle"])
        submit_frame_end = int(instance.data["frameEndHandle"])

        # get output path
        render_path = instance.data['path']
        script_path = context.data["currentFile"]

        use_published_workfile = instance.data["attributeValues"].get(
            "use_published_workfile", self.use_published_workfile
        )
        if use_published_workfile:
            script_path = self._get_published_workfile_path(context)

        # only add main rendering job if target is not frames_farm
        r_job_response_json = None
        if instance.data["render_target"] != "frames_farm":
            r_job_response = self.payload_submit(
                instance,
                script_path,
                render_path,
                node.name(),
                submit_frame_start,
                submit_frame_end
            )
            r_job_response_json = r_job_response.json()
            instance.data["deadlineSubmissionJob"] = r_job_response_json

            # Store output dir for unified publisher (filesequence)
            instance.data["outputDir"] = os.path.dirname(
                render_path).replace("\\", "/")
            instance.data["publishJobState"] = "Suspended"

        if instance.data.get("bakingNukeScripts"):
            for baking_script in instance.data["bakingNukeScripts"]:
                render_path = baking_script["bakeRenderPath"]
                script_path = baking_script["bakeScriptPath"]
                exe_node_name = baking_script["bakeWriteNodeName"]

                b_job_response = self.payload_submit(
                    instance,
                    script_path,
                    render_path,
                    exe_node_name,
                    submit_frame_start,
                    submit_frame_end,
                    r_job_response_json,
                    baking_submission=True
                )

                # Store output dir for unified publisher (filesequence)
                instance.data["deadlineSubmissionJob"] = b_job_response.json()

                instance.data["publishJobState"] = "Suspended"

                # add to list of job Id
                if not instance.data.get("bakingSubmissionJobs"):
                    instance.data["bakingSubmissionJobs"] = []

                instance.data["bakingSubmissionJobs"].append(
                    b_job_response.json()["_id"])

        # redefinition of families
        if "render" in instance.data["productType"]:
            instance.data["family"] = "write"
            instance.data["productType"] = "write"
            families.insert(0, "render2d")
        elif "prerender" in instance.data["productType"]:
            instance.data["family"] = "write"
            instance.data["productType"] = "write"
            families.insert(0, "prerender")
        instance.data["families"] = families

    def _get_published_workfile_path(self, context):
        """This method is temporary while the class is not inherited from
        AbstractSubmitDeadline"""
        anatomy = context.data["anatomy"]
        # WARNING Hardcoded template name 'default' > may not be used
        publish_template = anatomy.get_template_item(
            "publish", "default", "path"
        )
        for instance in context:
            if (
                instance.data["productType"] != "workfile"
                # Disabled instances won't be integrated
                or instance.data.get("publish") is False
            ):
                continue
            template_data = instance.data["anatomyData"]
            # Expect workfile instance has only one representation
            representation = instance.data["representations"][0]
            # Get workfile extension
            repre_file = representation["files"]
            self.log.info(repre_file)
            ext = os.path.splitext(repre_file)[1].lstrip(".")

            # Fill template data
            template_data["representation"] = representation["name"]
            template_data["ext"] = ext
            template_data["comment"] = None

            template_filled = publish_template.format(template_data)
            script_path = os.path.normpath(template_filled)
            self.log.info(
                "Using published scene for render {}".format(
                    script_path
                )
            )
            return script_path

        return None
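
    # Illustrative result, assuming the default publish path template
    # (every value below is hypothetical):
    #     {root}/sh010/publish/workfile/workfileMain/v012/
    #         sh010_workfileMain_v012.nk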

    def payload_submit(
        self,
        instance,
        script_path,
        render_path,
        exe_node_name,
        start_frame,
        end_frame,
        response_data=None,
        baking_submission=False,
    ):
        """Submit payload to Deadline

        Args:
            instance (pyblish.api.Instance): pyblish instance
            script_path (str): path to nuke script
            render_path (str): path to rendered images
            exe_node_name (str): name of the node to render
            start_frame (int): start frame
            end_frame (int): end frame
            response_data (Optional[dict]): response data from
                previous submission
            baking_submission (Optional[bool]): if it's a baking submission

        Returns:
            requests.Response
        """
        render_dir = os.path.normpath(os.path.dirname(render_path))

        # batch name
        src_filepath = instance.context.data["currentFile"]
        batch_name = os.path.basename(src_filepath)
        job_name = os.path.basename(render_path)

        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")

        output_filename_0 = self.preview_fname(render_path)

        if not response_data:
            response_data = {}

        try:
            # Ensure render folder exists
            os.makedirs(render_dir)
        except OSError:
            pass

        # resolve any limit groups
        limit_groups = self.get_limit_groups()
        self.log.debug("Limit groups: `{}`".format(limit_groups))

        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": batch_name,

                # Job name, as seen in Monitor
                "Name": job_name,

                # Arbitrary username, for visualisation in Monitor
                "UserName": self._deadline_user,

                "Priority": instance.data["attributeValues"].get(
                    "priority", self.priority),
                "ChunkSize": instance.data["attributeValues"].get(
                    "chunk", self.chunk_size),
                "ConcurrentTasks": instance.data["attributeValues"].get(
                    "concurrency",
                    self.concurrent_tasks
                ),

                "Department": self.department,

                "Pool": instance.data.get("primaryPool"),
                "SecondaryPool": instance.data.get("secondaryPool"),
                "Group": self.group,

                "Plugin": "Nuke",
                "Frames": "{start}-{end}".format(
                    start=start_frame,
                    end=end_frame
                ),
                "Comment": self._comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0": output_filename_0.replace("\\", "/"),

                # limiting groups
                "LimitGroups": ",".join(limit_groups)

            },
            "PluginInfo": {
                # Input
                "SceneFile": script_path,

                # Output directory and filename
                "OutputFilePath": render_dir.replace("\\", "/"),
                # "OutputFilePrefix": render_variables["filename_prefix"],

                # Mandatory for Deadline
                "Version": self._ver.group(),

                # Resolve relative references
                "ProjectPath": script_path,
                "AWSAssetFile0": render_path,

                # using GPU by default
                "UseGpu": instance.data["attributeValues"].get(
                    "use_gpu", self.use_gpu),

                # Only the specific write node is rendered.
                "WriteNode": exe_node_name
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Add workfile dependency.
        workfile_dependency = instance.data["attributeValues"].get(
            "workfile_dependency", self.workfile_dependency
        )
        if workfile_dependency:
            payload["JobInfo"].update({"AssetDependency0": script_path})

        # TODO: rewrite for baking with sequences
        if baking_submission:
            payload["JobInfo"].update({
                "JobType": "Normal",
                "ChunkSize": 99999999
            })

        if response_data.get("_id"):
            payload["JobInfo"].update({
                "BatchName": response_data["Props"]["Batch"],
                "JobDependency0": response_data["_id"],
            })

        # Include critical environment variables with submission
        keys = [
            "PYTHONPATH",
            "PATH",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_APP_NAME",
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "PYBLISHPLUGINPATH",
            "NUKE_PATH",
            "TOOL_ENV",
            "FOUNDRY_LICENSE",
            "OPENPYPE_SG_USER",
        ]

        # add allowed keys from preset if any
        if self.env_allowed_keys:
            keys += self.env_allowed_keys

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        # to recognize render jobs
        environment["AYON_RENDER_JOB"] = "1"

        # finally search replace in values of any key
        if self.env_search_replace_values:
            for key, value in environment.items():
                for item in self.env_search_replace_values:
                    environment[key] = value.replace(
                        item["name"], item["value"]
                    )

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })
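        # Illustrative: an environment of {"NUKE_PATH": "/tools/nuke"} ends
        # up in the job info as
        #     "EnvironmentKeyValue0": "NUKE_PATH=/tools/nuke"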

        plugin = payload["JobInfo"]["Plugin"]
        self.log.debug("using render plugin : {}".format(plugin))

        self.log.debug("Submitting..")
        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

        # adding expected files to instance.data
        self.expected_files(
            instance,
            render_path,
            start_frame,
            end_frame
        )

        self.log.debug("__ expectedFiles: `{}`".format(
            instance.data["expectedFiles"]))
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(self.deadline_url,
                                 json=payload,
                                 timeout=10,
                                 auth=auth,
                                 verify=verify)

        if not response.ok:
            raise Exception(response.text)

        return response

    def preflight_check(self, instance):
        """Ensure the frameStart and frameEnd values are integers."""

        for key in ("frameStart", "frameEnd"):
            value = instance.data[key]

            if int(value) == value:
                continue

            self.log.warning(
                "%f=%d was rounded off to nearest integer"
                % (value, int(value))
            )

    def preview_fname(self, path):
        """Return output file path with #### for padding.

        Deadline requires the path to be formatted with # in place of numbers.
        For example `/path/to/render.####.png`

        Args:
            path (str): path to rendered images

        Returns:
            str

        """
        self.log.debug("_ path: `{}`".format(path))
        if "%" in path:
            # convert printf-style padding (e.g. %04d) to hashes (####)
            match = re.search(r"%0(\d+)d", path)
            if match:
                padding = int(match.group(1))
                return path.replace(match.group(0), "#" * padding)
        if "#" in path:
            self.log.debug("_ path: `{}`".format(path))
        return path
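
    # Illustrative behaviour of preview_fname (paths are hypothetical):
    #     "renders/sh010.%04d.exr" -> "renders/sh010.####.exr"
    #     "renders/sh010.####.exr" -> returned unchanged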

    def expected_files(
        self,
        instance,
        filepath,
        start_frame,
        end_frame
    ):
        """Create expected files in instance data"""
        if not instance.data.get("expectedFiles"):
            instance.data["expectedFiles"] = []

        dirname = os.path.dirname(filepath)
        file = os.path.basename(filepath)

        # since some files might be already tagged as publish_on_farm
        # we need to avoid adding them to expected files since those would be
        # duplicated into metadata.json file
        representations = instance.data.get("representations", [])
        # check if file is not in representations with publish_on_farm tag
        for repre in representations:
            # Skip if 'publish_on_farm' not available
            if "publish_on_farm" not in repre.get("tags", []):
                continue

            # in case where single file (video, image) is already in
            # representation file. Will be added to expected files via
            # submit_publish_job.py
            if file in repre.get("files", []):
                self.log.debug(
                    "Skipping expected file: {}".format(filepath))
                return

        # in case path is hashed sequence expression
        # (e.g. /path/to/file.####.png)
        if "#" in file:
            pparts = file.split("#")
            padding = "%0{}d".format(len(pparts) - 1)
            file = pparts[0] + padding + pparts[-1]

        # in case input path was single file (video or image)
        if "%" not in file:
            instance.data["expectedFiles"].append(filepath)
            return

        # shift start frame by 1 if slate is present
        if instance.data.get("slate"):
            start_frame -= 1

        # add sequence files to expected files
        for i in range(start_frame, (end_frame + 1)):
            instance.data["expectedFiles"].append(
                os.path.join(dirname, (file % i)).replace("\\", "/"))
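
    # Illustrative: filepath "renders/sh010.####.exr" with start_frame 1001
    # and end_frame 1003 (no slate) appends:
    #     renders/sh010.1001.exr
    #     renders/sh010.1002.exr
    #     renders/sh010.1003.exr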

    def get_limit_groups(self):
        """Search for limit group nodes and return group names.

        Limit groups are defined as pairs in the Nuke Deadline submitter
        presets, where the key is the name of a limit group and the value is
        a list of plugin node class names. Thus, when a plugin uses more
        than one node, these will be captured and the triggered process
        will add the appropriate limit group to the payload jobinfo
        attributes.

        Returns:
            list: captured groups list
        """
        # Not all hosts can import this module.
        import nuke

        captured_groups = []
        for limit_group in self.limit_groups:
            lg_name = limit_group["name"]

            for node_class in limit_group["value"]:
                for node in nuke.allNodes(recurseGroups=True):
                    # ignore all nodes not member of defined class
                    if node.Class() not in node_class:
                        continue
                    # ignore all disabled nodes
                    if node["disable"].value():
                        continue
                    # add group name if not already added
                    if lg_name not in captured_groups:
                        captured_groups.append(lg_name)
        return captured_groups
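
    # Illustrative preset structure (names are hypothetical):
    #     limit_groups = [
    #         {"name": "nuke_furnace", "value": ["F_Kronos", "F_RigRemoval"]}
    #     ]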
@@ -1,463 +0,0 @@
# -*- coding: utf-8 -*-
"""Submit publishing job to farm."""
import os
import json
import re
from copy import deepcopy

import ayon_api
import pyblish.api

from ayon_core.pipeline import publish
from ayon_core.lib import EnumDef, is_in_tests
from ayon_core.pipeline.version_start import get_versioning_start
from ayon_core.pipeline.farm.pyblish_functions import (
    create_skeleton_instance_cache,
    create_instances_for_cache,
    attach_instances_to_product,
    prepare_cache_representations,
    create_metadata_path
)
from ayon_deadline.abstract_submit_deadline import requests_post


class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
                                     publish.AYONPyblishPluginMixin,
                                     publish.ColormanagedPyblishPluginMixin):
    """Process Cache Job submitted on farm.

    This is a replicated version of the submit publish job,
    specifically for cache(s).

    These jobs are dependent on a deadline job
    submission prior to this plug-in.

    - In case of Deadline, it creates dependent job on farm publishing
      rendered image sequence.

    Options in instance.data:
        - deadlineSubmissionJob (dict, Required): The returned .json
          data from the job submission to deadline.

        - outputDir (str, Required): The output directory where the metadata
          file should be generated. It's assumed that this will also be
          the final folder containing the output files.

        - ext (str, Optional): The extension (including `.`) that is required
          in the output filename to be picked up for image sequence
          publishing.

        - expectedFiles (list or dict): explained below

    """

    label = "Submit cache jobs to Deadline"
    order = pyblish.api.IntegratorOrder + 0.2
    icon = "tractor"
    settings_category = "deadline"

    targets = ["local"]

    hosts = ["houdini"]

    families = ["publish.hou"]

    environ_keys = [
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "AYON_APP_NAME",
        "AYON_USERNAME",
        "AYON_SG_USERNAME",
        "KITSU_LOGIN",
        "KITSU_PWD"
    ]

    # custom deadline attributes
    deadline_department = ""
    deadline_pool = ""
    deadline_pool_secondary = ""
    deadline_group = ""
    deadline_chunk_size = 1
    deadline_priority = None

    # regex for finding frame number in string
    R_FRAME_NUMBER = re.compile(r'.+\.(?P<frame>[0-9]+)\..+')

    plugin_pype_version = "3.0"

    # script path for publish_filesequence.py
    publishing_script = None

    def _submit_deadline_post_job(self, instance, job):
        """Submit publish job to Deadline.

        Returns:
            (str): deadline_publish_job_id
        """
        data = instance.data.copy()
        product_name = data["productName"]
        job_name = "Publish - {}".format(product_name)

        anatomy = instance.context.data['anatomy']

        # instance.data.get("productName") != instances[0]["productName"]
        # 'Main' vs 'renderMain'
        override_version = None
        instance_version = instance.data.get("version")  # take this if exists
        if instance_version != 1:
            override_version = instance_version

        output_dir = self._get_publish_folder(
            anatomy,
            deepcopy(instance.data["anatomyData"]),
            instance.data.get("folderEntity"),
            instance.data["productName"],
            instance.context,
            instance.data["productType"],
            override_version
        )

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, anatomy)

        environment = {
            "AYON_PROJECT_NAME": instance.context.data["projectName"],
            "AYON_FOLDER_PATH": instance.context.data["folderPath"],
            "AYON_TASK_NAME": instance.context.data["task"],
            "AYON_USERNAME": instance.context.data["user"],
            "AYON_LOG_NO_COLORS": "1",
            "AYON_IN_TESTS": str(int(is_in_tests())),
            "AYON_PUBLISH_JOB": "1",
            "AYON_RENDER_JOB": "0",
            "AYON_REMOTE_PUBLISH": "0",
            "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"],
            "AYON_DEFAULT_SETTINGS_VARIANT": (
                os.environ["AYON_DEFAULT_SETTINGS_VARIANT"]
            ),
        }

        # add environments from self.environ_keys
        for env_key in self.environ_keys:
            if os.getenv(env_key):
                environment[env_key] = os.environ[env_key]

        priority = self.deadline_priority or instance.data.get("priority", 50)

        instance_settings = self.get_attr_values_from_data(instance.data)
        initial_status = instance_settings.get("publishJobState", "Active")

        args = [
            "--headless",
            'publish',
            '"{}"'.format(rootless_metadata_path),
            "--targets", "deadline",
            "--targets", "farm"
        ]

        # Generate the payload for Deadline submission
        secondary_pool = (
            self.deadline_pool_secondary or instance.data.get("secondaryPool")
        )
        payload = {
            "JobInfo": {
                "Plugin": "Ayon",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),

                "Department": self.deadline_department,
                "ChunkSize": self.deadline_chunk_size,
                "Priority": priority,
                "InitialStatus": initial_status,

                "Group": self.deadline_group,
                "Pool": self.deadline_pool or instance.data.get("primaryPool"),
                "SecondaryPool": secondary_pool,
                # ensure the outputdirectory with correct slashes
                "OutputDirectory0": output_dir.replace("\\", "/")
            },
            "PluginInfo": {
                "Version": self.plugin_pype_version,
                "Arguments": " ".join(args),
                "SingleFrameOnly": "True",
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        if job.get("_id"):
            payload["JobInfo"]["JobDependency0"] = job["_id"]

        for index, (key_, value_) in enumerate(environment.items()):
            payload["JobInfo"].update(
                {
                    "EnvironmentKeyValue%d"
                    % index: "{key}={value}".format(
                        key=key_, value=value_
                    )
                }
            )
        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.debug("Submitting Deadline publish job ...")

        url = "{}/api/jobs".format(self.deadline_url)
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(
            url, json=payload, timeout=10, auth=auth, verify=verify)
        if not response.ok:
            raise Exception(response.text)

        deadline_publish_job_id = response.json()["_id"]

        return deadline_publish_job_id

    def process(self, instance):
        # type: (pyblish.api.Instance) -> None
        """Process plugin.

        Detect type of render farm submission and create and post dependent
        job in case of Deadline. It creates json file with metadata needed for
        publishing in directory of render.

        Args:
            instance (pyblish.api.Instance): Instance data.

        """
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        anatomy = instance.context.data["anatomy"]

        instance_skeleton_data = create_skeleton_instance_cache(instance)
        """
        if content of `expectedFiles` list are dictionaries, we will handle
        it as list of AOVs, creating instance for every one of them.

        Example:
        --------

        expectedFiles = [
            {
                "beauty": [
                    "foo_v01.0001.exr",
                    "foo_v01.0002.exr"
                ],

                "Z": [
                    "boo_v01.0001.exr",
                    "boo_v01.0002.exr"
                ]
            }
        ]

        This will create instances for `beauty` and `Z` product
        adding those files to their respective representations.

        If we have only list of files, we collect all file sequences.
        More than one probably doesn't make sense, but we'll handle it
        like creating one instance with multiple representations.

        Example:
        --------

        expectedFiles = [
            "foo_v01.0001.exr",
            "foo_v01.0002.exr",
            "xxx_v01.0001.exr",
            "xxx_v01.0002.exr"
        ]

        This will result in one instance with two representations:
        `foo` and `xxx`
        """

        if isinstance(instance.data.get("expectedFiles")[0], dict):
            instances = create_instances_for_cache(
                instance, instance_skeleton_data)
        else:
            representations = prepare_cache_representations(
                instance_skeleton_data,
                instance.data.get("expectedFiles"),
                anatomy
            )

            if "representations" not in instance_skeleton_data.keys():
                instance_skeleton_data["representations"] = []

            # add representation
            instance_skeleton_data["representations"] += representations
            instances = [instance_skeleton_data]

        # attach instances to product
        if instance.data.get("attachTo"):
            instances = attach_instances_to_product(
                instance.data.get("attachTo"), instances
            )

        r''' SUBMiT PUBLiSH JOB 2 D34DLiN3
          ____
        '     '            .---.  .---. .--. .---. .--..--..--..--. .---.
        |     |   --= \   |  .  \/   _|/    \|  .  \ ||  ||  \|  |/   _|
        | JOB |   --= /   |  |  ||  __|  ..  |  |  | |;_ ||  \  ||  __|
        |     |           |____./ \.__|._||_.|___./|_____|||__|\__|\.___|
        ._____.

        '''

        render_job = None
        submission_type = ""
        if instance.data.get("toBeRenderedOn") == "deadline":
            render_job = instance.data.pop("deadlineSubmissionJob", None)
            submission_type = "deadline"

        if not render_job:
            import getpass

            render_job = {}
            self.log.debug("Faking job data ...")
            render_job["Props"] = {}
            # Render job doesn't exist because we do not have prior
            # submission. We still use data from it so lets fake it.
            #
            # Batch name reflects the original scene name

            if instance.data.get("assemblySubmissionJobs"):
                render_job["Props"]["Batch"] = instance.data.get(
                    "jobBatchName")
            else:
                batch = os.path.splitext(os.path.basename(
                    instance.context.data.get("currentFile")))[0]
                render_job["Props"]["Batch"] = batch
            # User is deadline user
            render_job["Props"]["User"] = instance.context.data.get(
                "deadlineUser", getpass.getuser())

        deadline_publish_job_id = None
        if submission_type == "deadline":
            self.deadline_url = instance.data["deadline"]["url"]
            assert self.deadline_url, "Requires Deadline Webservice URL"

            deadline_publish_job_id = \
                self._submit_deadline_post_job(instance, render_job)

            # Inject deadline url to instances.
            for inst in instances:
                inst["deadline"] = instance.data["deadline"]

        # publish job file
        publish_job = {
            "folderPath": instance_skeleton_data["folderPath"],
            "frameStart": instance_skeleton_data["frameStart"],
            "frameEnd": instance_skeleton_data["frameEnd"],
            "fps": instance_skeleton_data["fps"],
            "source": instance_skeleton_data["source"],
            "user": instance.context.data["user"],
            "version": instance.context.data["version"],  # workfile version
            "intent": instance.context.data.get("intent"),
            "comment": instance.context.data.get("comment"),
            "job": render_job or None,
            "instances": instances
        }

        if deadline_publish_job_id:
            publish_job["deadline_publish_job_id"] = deadline_publish_job_id

        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, anatomy)

        with open(metadata_path, "w") as f:
            json.dump(publish_job, f, indent=4, sort_keys=True)

    def _get_publish_folder(self, anatomy, template_data,
                            folder_entity, product_name, context,
                            product_type, version=None):
        """
        Extracted logic to pre-calculate real publish folder, which is
        calculated in IntegrateNew inside of the Deadline process.
        This should match the logic in:
            'collect_anatomy_instance_data' - to
                get correct anatomy, family, version for product and
            'collect_resources_path' - to
                get publish_path

        Args:
            anatomy (ayon_core.pipeline.anatomy.Anatomy):
            template_data (dict): pre-calculated collected data for process
            folder_entity (dict[str, Any]): Folder entity.
            product_name (str): Product name (actually group name of product).
            product_type (str): for current deadline process it's always
                'render'
                TODO - for generic use family needs to be dynamically
                    calculated like IntegrateNew does
            version (int): override version from instance if exists

        Returns:
            (string): publish folder where rendered and published files will
                be stored, based on the 'publish' template
        """

        project_name = context.data["projectName"]
        host_name = context.data["hostName"]
        if not version:
            version_entity = None
            if folder_entity:
                version_entity = ayon_api.get_last_version_by_product_name(
                    project_name,
                    product_name,
                    folder_entity["id"]
                )

            if version_entity:
                version = int(version_entity["version"]) + 1
            else:
                version = get_versioning_start(
                    project_name,
                    host_name,
                    task_name=template_data["task"]["name"],
                    task_type=template_data["task"]["type"],
                    product_type="render",
                    product_name=product_name,
                    project_settings=context.data["project_settings"]
                )

        task_info = template_data.get("task") or {}

        template_name = publish.get_publish_template_name(
            project_name,
            host_name,
            product_type,
            task_info.get("name"),
            task_info.get("type"),
        )

        template_data["subset"] = product_name
        template_data["family"] = product_type
        template_data["version"] = version
        template_data["product"] = {
            "name": product_name,
            "type": product_type,
        }

        render_dir_template = anatomy.get_template_item(
            "publish", template_name, "directory"
        )
        return render_dir_template.format_strict(template_data)

    @classmethod
    def get_attribute_defs(cls):
        return [
            EnumDef("publishJobState",
                    label="Publish Job State",
                    items=["Active", "Suspended"],
                    default="Active")
        ]
@@ -1,585 +0,0 @@
# -*- coding: utf-8 -*-
"""Submit publishing job to farm."""
import os
import json
import re
from copy import deepcopy

import clique
import ayon_api
import pyblish.api

from ayon_core.pipeline import publish
from ayon_core.lib import EnumDef, is_in_tests
from ayon_core.pipeline.version_start import get_versioning_start

from ayon_core.pipeline.farm.pyblish_functions import (
    create_skeleton_instance,
    create_instances_for_aov,
    attach_instances_to_product,
    prepare_representations,
    create_metadata_path
)
from ayon_deadline.abstract_submit_deadline import requests_post


def get_resource_files(resources, frame_range=None):
    """Get resource files at given path.

    If `frame_range` is specified, frames within it are removed from the
    collection.

    Arguments:
        resources (list): List of resources
        frame_range (list): Frame range to apply override

    Returns:
        list of str: list of collected resources

    """
    res_collections, _ = clique.assemble(resources)
    assert len(res_collections) == 1, "Multiple collections found"
    res_collection = res_collections[0]

    # Remove any frames within the given range
    if frame_range is not None:
        for frame in frame_range:
            if frame not in res_collection.indexes:
                continue
            res_collection.indexes.remove(frame)

    return list(res_collection)
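

# Illustrative: for resources ["r.1001.exr", "r.1002.exr", "r.1003.exr"] and
# frame_range [1001, 1002], the returned list is ["r.1003.exr"].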


class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
                                publish.AYONPyblishPluginMixin,
                                publish.ColormanagedPyblishPluginMixin):
    """Process Job submitted on farm.

    These jobs are dependent on a deadline job
    submission prior to this plug-in.

    It creates dependent job on farm publishing rendered image sequence.

    Options in instance.data:
        - deadlineSubmissionJob (dict, Required): The returned .json
          data from the job submission to deadline.

        - outputDir (str, Required): The output directory where the metadata
          file should be generated. It's assumed that this will also be
          the final folder containing the output files.

        - ext (str, Optional): The extension (including `.`) that is required
          in the output filename to be picked up for image sequence
          publishing.

        - publishJobState (str, Optional): "Active" or "Suspended"
          This defaults to "Suspended"

        - expectedFiles (list or dict): explained below

    """

    label = "Submit Image Publishing job to Deadline"
    order = pyblish.api.IntegratorOrder + 0.2
    icon = "tractor"

    targets = ["local"]

    hosts = ["fusion", "max", "maya", "nuke", "houdini",
             "celaction", "aftereffects", "harmony", "blender"]

    families = ["render", "render.farm", "render.frames_farm",
                "prerender", "prerender.farm", "prerender.frames_farm",
                "renderlayer", "imagesequence", "image",
                "vrayscene", "maxrender",
                "arnold_rop", "mantra_rop",
                "karma_rop", "vray_rop",
                "redshift_rop", "usdrender"]
    settings_category = "deadline"

    aov_filter = [
        {
            "name": "maya",
            "value": [r".*([Bb]eauty).*"]
        },
        {
            "name": "blender",
            "value": [r".*([Bb]eauty).*"]
        },
        {
            # for everything from AE
            "name": "aftereffects",
            "value": [r".*"]
        },
        {
            "name": "harmony",
            "value": [r".*"]
        },
        {
            "name": "celaction",
            "value": [r".*"]
        },
        {
            "name": "max",
            "value": [r".*"]
        },
    ]

    environ_keys = [
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "AYON_APP_NAME",
        "AYON_USERNAME",
        "AYON_SG_USERNAME",
        "KITSU_LOGIN",
        "KITSU_PWD"
    ]

    # custom deadline attributes
    deadline_department = ""
    deadline_pool = ""
    deadline_pool_secondary = ""
    deadline_group = ""
    deadline_chunk_size = 1
    deadline_priority = None

    # regex for finding frame number in string
    R_FRAME_NUMBER = re.compile(r'.+\.(?P<frame>[0-9]+)\..+')

    # mapping of instance properties to be transferred to new instance
    # for every specified family
    instance_transfer = {
        "slate": ["slateFrames", "slate"],
        "review": ["lutPath"],
        "render2d": ["bakingNukeScripts", "version"],
        "renderlayer": ["convertToScanline"]
    }

    # list of family names to transfer to new family if present
    families_transfer = ["render3d", "render2d", "ftrack", "slate"]
    plugin_pype_version = "3.0"

    # script path for publish_filesequence.py
    publishing_script = None

    # poor man exclusion
    skip_integration_repre_list = []

    def _submit_deadline_post_job(self, instance, job, instances):
        """Submit publish job to Deadline.

        Returns:
            (str): deadline_publish_job_id
        """
        data = instance.data.copy()
        product_name = data["productName"]
        job_name = "Publish - {}".format(product_name)

        anatomy = instance.context.data['anatomy']

        # instance.data.get("productName") != instances[0]["productName"]
        # 'Main' vs 'renderMain'
        override_version = None
        instance_version = instance.data.get("version")  # take this if exists
        if instance_version != 1:
            override_version = instance_version

        output_dir = self._get_publish_folder(
            anatomy,
            deepcopy(instance.data["anatomyData"]),
            instance.data.get("folderEntity"),
            instances[0]["productName"],
            instance.context,
            instances[0]["productType"],
            override_version
        )

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, anatomy)

        environment = {
            "AYON_PROJECT_NAME": instance.context.data["projectName"],
            "AYON_FOLDER_PATH": instance.context.data["folderPath"],
            "AYON_TASK_NAME": instance.context.data["task"],
            "AYON_USERNAME": instance.context.data["user"],
            "AYON_LOG_NO_COLORS": "1",
            "AYON_IN_TESTS": str(int(is_in_tests())),
            "AYON_PUBLISH_JOB": "1",
            "AYON_RENDER_JOB": "0",
            "AYON_REMOTE_PUBLISH": "0",
            "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"],
            "AYON_DEFAULT_SETTINGS_VARIANT": (
                os.environ["AYON_DEFAULT_SETTINGS_VARIANT"]
            ),
        }

        # add environments from self.environ_keys
        for env_key in self.environ_keys:
            if os.getenv(env_key):
                environment[env_key] = os.environ[env_key]

        priority = self.deadline_priority or instance.data.get("priority", 50)

        instance_settings = self.get_attr_values_from_data(instance.data)
        initial_status = instance_settings.get("publishJobState", "Active")

        args = [
            "--headless",
            'publish',
            '"{}"'.format(rootless_metadata_path),
            "--targets", "deadline",
            "--targets", "farm"
        ]

        # Generate the payload for Deadline submission
        secondary_pool = (
            self.deadline_pool_secondary or instance.data.get("secondaryPool")
        )
        payload = {
            "JobInfo": {
                "Plugin": "Ayon",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),

                "Department": self.deadline_department,
                "ChunkSize": self.deadline_chunk_size,
                "Priority": priority,
                "InitialStatus": initial_status,

                "Group": self.deadline_group,
                "Pool": self.deadline_pool or instance.data.get("primaryPool"),
                "SecondaryPool": secondary_pool,
                # ensure the outputdirectory with correct slashes
                "OutputDirectory0": output_dir.replace("\\", "/")
            },
            "PluginInfo": {
                "Version": self.plugin_pype_version,
                "Arguments": " ".join(args),
                "SingleFrameOnly": "True",
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        # add assembly jobs as dependencies
        if instance.data.get("tileRendering"):
            self.log.info("Adding tile assembly jobs as dependencies...")
            job_index = 0
            for assembly_id in instance.data.get("assemblySubmissionJobs"):
                payload["JobInfo"]["JobDependency{}".format(
                    job_index)] = assembly_id  # noqa: E501
                job_index += 1
        elif instance.data.get("bakingSubmissionJobs"):
            self.log.info(
                "Adding baking submission jobs as dependencies..."
            )
            job_index = 0
            for assembly_id in instance.data["bakingSubmissionJobs"]:
                payload["JobInfo"]["JobDependency{}".format(
                    job_index)] = assembly_id  # noqa: E501
                job_index += 1
        elif job.get("_id"):
            payload["JobInfo"]["JobDependency0"] = job["_id"]

        for index, (key_, value_) in enumerate(environment.items()):
            payload["JobInfo"].update(
                {
                    "EnvironmentKeyValue%d"
                    % index: "{key}={value}".format(
                        key=key_, value=value_
                    )
                }
            )
        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.debug("Submitting Deadline publish job ...")

        url = "{}/api/jobs".format(self.deadline_url)
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(
            url, json=payload, timeout=10, auth=auth, verify=verify)
        if not response.ok:
            raise Exception(response.text)

        deadline_publish_job_id = response.json()["_id"]

        return deadline_publish_job_id

    def process(self, instance):
        # type: (pyblish.api.Instance) -> None
        """Process plugin.

        Detect type of render farm submission and create and post dependent
        job in case of Deadline. It creates json file with metadata needed for
        publishing in directory of render.

        Args:
            instance (pyblish.api.Instance): Instance data.

        """
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        anatomy = instance.context.data["anatomy"]

        instance_skeleton_data = create_skeleton_instance(
            instance, families_transfer=self.families_transfer,
            instance_transfer=self.instance_transfer)
        """
        if content of `expectedFiles` list are dictionaries, we will handle
        it as list of AOVs, creating instance for every one of them.

        Example:
        --------

        expectedFiles = [
            {
                "beauty": [
                    "foo_v01.0001.exr",
                    "foo_v01.0002.exr"
                ],

                "Z": [
                    "boo_v01.0001.exr",
                    "boo_v01.0002.exr"
                ]
            }
        ]

        This will create instances for `beauty` and `Z` product
        adding those files to their respective representations.

        If we have only list of files, we collect all file sequences.
        More than one probably doesn't make sense, but we'll handle it
        like creating one instance with multiple representations.

        Example:
        --------

        expectedFiles = [
            "foo_v01.0001.exr",
            "foo_v01.0002.exr",
            "xxx_v01.0001.exr",
            "xxx_v01.0002.exr"
        ]

        This will result in one instance with two representations:
        `foo` and `xxx`
        """
        do_not_add_review = False
        if instance.data.get("review") is False:
            self.log.debug("Instance has review explicitly disabled.")
            do_not_add_review = True

        aov_filter = {
            item["name"]: item["value"]
            for item in self.aov_filter
        }
        if isinstance(instance.data.get("expectedFiles")[0], dict):
            instances = create_instances_for_aov(
                instance, instance_skeleton_data,
                aov_filter,
                self.skip_integration_repre_list,
                do_not_add_review
            )
        else:
            representations = prepare_representations(
                instance_skeleton_data,
                instance.data.get("expectedFiles"),
                anatomy,
                aov_filter,
                self.skip_integration_repre_list,
                do_not_add_review,
                instance.context,
                self
            )

            if "representations" not in instance_skeleton_data.keys():
                instance_skeleton_data["representations"] = []

            # add representation
            instance_skeleton_data["representations"] += representations
            instances = [instance_skeleton_data]

        # attach instances to product
        if instance.data.get("attachTo"):
            instances = attach_instances_to_product(
                instance.data.get("attachTo"), instances
            )

        r''' SUBMiT PUBLiSH JOB 2 D34DLiN3
          ____
        '     '            .---.  .---. .--. .---. .--..--..--..--. .---.
        |     |   --= \   |  .  \/   _|/    \|  .  \ ||  ||  \|  |/   _|
        | JOB |   --= /   |  |  ||  __|  ..  |  |  | |;_ ||  \  ||  __|
        |     |           |____./ \.__|._||_.|___./|_____|||__|\__|\.___|
        ._____.

        '''

        render_job = instance.data.pop("deadlineSubmissionJob", None)
        if not render_job and instance.data.get("tileRendering") is False:
            raise AssertionError(("Cannot continue without valid "
                                  "Deadline submission."))
        if not render_job:
            import getpass

            render_job = {}
            self.log.debug("Faking job data ...")
            render_job["Props"] = {}
            # Render job doesn't exist because we do not have prior
            # submission. We still use data from it so lets fake it.
            #
            # Batch name reflects the original scene name

            if instance.data.get("assemblySubmissionJobs"):
                render_job["Props"]["Batch"] = instance.data.get(
                    "jobBatchName")
            else:
                batch = os.path.splitext(os.path.basename(
                    instance.context.data.get("currentFile")))[0]
                render_job["Props"]["Batch"] = batch
            # User is deadline user
            render_job["Props"]["User"] = instance.context.data.get(
                "deadlineUser", getpass.getuser())

            render_job["Props"]["Env"] = {
                "FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
                "FTRACK_API_KEY": os.environ.get("FTRACK_API_KEY"),
                "FTRACK_SERVER": os.environ.get("FTRACK_SERVER"),
            }

        # get default deadline webservice url from deadline module
        self.deadline_url = instance.data["deadline"]["url"]
        assert self.deadline_url, "Requires Deadline Webservice URL"

        deadline_publish_job_id = \
            self._submit_deadline_post_job(instance, render_job, instances)

        # Inject deadline url to instances to query DL for job id for
        # overrides
        for inst in instances:
            inst["deadline"] = instance.data["deadline"]

        # publish job file
        publish_job = {
            "folderPath": instance_skeleton_data["folderPath"],
            "frameStart": instance_skeleton_data["frameStart"],
            "frameEnd": instance_skeleton_data["frameEnd"],
|
||||
"fps": instance_skeleton_data["fps"],
|
||||
"source": instance_skeleton_data["source"],
|
||||
"user": instance.context.data["user"],
|
||||
"version": instance.context.data["version"], # workfile version
|
||||
"intent": instance.context.data.get("intent"),
|
||||
"comment": instance.context.data.get("comment"),
|
||||
"job": render_job or None,
|
||||
"instances": instances
|
||||
}
|
||||
|
||||
if deadline_publish_job_id:
|
||||
publish_job["deadline_publish_job_id"] = deadline_publish_job_id
|
||||
|
||||
# add audio to metadata file if available
|
||||
audio_file = instance.context.data.get("audioFile")
|
||||
if audio_file and os.path.isfile(audio_file):
|
||||
publish_job.update({"audio": audio_file})
|
||||
|
||||
metadata_path, rootless_metadata_path = \
|
||||
create_metadata_path(instance, anatomy)
|
||||
|
||||
with open(metadata_path, "w") as f:
|
||||
json.dump(publish_job, f, indent=4, sort_keys=True)
|
||||
|
||||
def _get_publish_folder(self, anatomy, template_data,
|
||||
folder_entity, product_name, context,
|
||||
product_type, version=None):
|
||||
"""
|
||||
Extracted logic to pre-calculate real publish folder, which is
|
||||
calculated in IntegrateNew inside of Deadline process.
|
||||
This should match logic in:
|
||||
'collect_anatomy_instance_data' - to
|
||||
get correct anatomy, family, version for product name and
|
||||
'collect_resources_path'
|
||||
get publish_path
|
||||
|
||||
Args:
|
||||
anatomy (ayon_core.pipeline.anatomy.Anatomy):
|
||||
template_data (dict): pre-calculated collected data for process
|
||||
folder_entity (dict[str, Any]): Folder entity.
|
||||
product_name (string): Product name (actually group name
|
||||
of product)
|
||||
product_type (string): for current deadline process it's always
|
||||
'render'
|
||||
TODO - for generic use family needs to be dynamically
|
||||
calculated like IntegrateNew does
|
||||
version (int): override version from instance if exists
|
||||
|
||||
Returns:
|
||||
(string): publish folder where rendered and published files will
|
||||
be stored
|
||||
based on 'publish' template
|
||||
"""
|
||||
|
||||
project_name = context.data["projectName"]
|
||||
host_name = context.data["hostName"]
|
||||
if not version:
|
||||
version_entity = None
|
||||
if folder_entity:
|
||||
version_entity = ayon_api.get_last_version_by_product_name(
|
||||
project_name,
|
||||
product_name,
|
||||
folder_entity["id"]
|
||||
)
|
||||
|
||||
if version_entity:
|
||||
version = int(version_entity["version"]) + 1
|
||||
else:
|
||||
version = get_versioning_start(
|
||||
project_name,
|
||||
host_name,
|
||||
task_name=template_data["task"]["name"],
|
||||
task_type=template_data["task"]["type"],
|
||||
product_type="render",
|
||||
product_name=product_name,
|
||||
project_settings=context.data["project_settings"]
|
||||
)
|
||||
|
||||
host_name = context.data["hostName"]
|
||||
task_info = template_data.get("task") or {}
|
||||
|
||||
template_name = publish.get_publish_template_name(
|
||||
project_name,
|
||||
host_name,
|
||||
product_type,
|
||||
task_info.get("name"),
|
||||
task_info.get("type"),
|
||||
)
|
||||
|
||||
template_data["version"] = version
|
||||
template_data["subset"] = product_name
|
||||
template_data["family"] = product_type
|
||||
template_data["product"] = {
|
||||
"name": product_name,
|
||||
"type": product_type,
|
||||
}
|
||||
|
||||
render_dir_template = anatomy.get_template_item(
|
||||
"publish", template_name, "directory"
|
||||
)
|
||||
return render_dir_template.format_strict(template_data)
|
||||
|
||||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
return [
|
||||
EnumDef("publishJobState",
|
||||
label="Publish Job State",
|
||||
items=["Active", "Suspended"],
|
||||
default="Active")
|
||||
]
|
||||
|
|
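To make the two `expectedFiles` shapes described in the docstring above concrete, here is a minimal standalone sketch (hypothetical file names, standard library only; the real pipeline collects sequences with clique rather than a naive prefix split):

import os
from collections import defaultdict

expected_files = [
    "foo_v01.0001.exr",
    "foo_v01.0002.exr",
    "xxx_v01.0001.exr",
    "xxx_v01.0002.exr",
]

if expected_files and isinstance(expected_files[0], dict):
    # AOV layout: one instance per key ("beauty", "Z", ...)
    aovs = expected_files[0]
else:
    # Flat layout: group files into sequences by their name prefix
    aovs = defaultdict(list)
    for file_name in expected_files:
        prefix = os.path.basename(file_name).split(".", 1)[0]
        aovs[prefix].append(file_name)

print(dict(aovs))  # {"foo": [...], "xxx": [...]}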
@ -1,52 +0,0 @@
import pyblish.api

from ayon_core.pipeline import PublishXmlValidationError

from ayon_deadline.abstract_submit_deadline import requests_get


class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
    """Validate Deadline Web Service is running"""

    label = "Validate Deadline Web Service"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya", "nuke", "aftereffects", "harmony", "fusion"]
    families = ["renderlayer", "render", "render.farm"]

    # cache
    responses = {}

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Should not be processed on farm, skipping.")
            return

        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        kwargs = {}
        if instance.data["deadline"]["require_authentication"]:
            auth = instance.data["deadline"]["auth"]
            kwargs["auth"] = auth

            if not auth[0]:
                raise PublishXmlValidationError(
                    self,
                    "Deadline requires authentication. "
                    "At least username is required to be set in "
                    "Site Settings.")

        if deadline_url not in self.responses:
            self.responses[deadline_url] = requests_get(deadline_url, **kwargs)

        response = self.responses[deadline_url]
        if response.status_code == 401:
            raise PublishXmlValidationError(
                self,
                "Deadline requires authentication. "
                "Provided credentials are not working. "
                "Please change them in Site Settings")
        assert response.ok, "Response must be ok"
        assert response.text.startswith("Deadline Web Service "), (
            "Web service did not respond with 'Deadline Web Service'"
        )
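The validator above performs one GET per unique webservice URL and caches it for the whole publish session. A simplified standalone sketch of that cache-and-validate pattern, using plain `requests` (names are illustrative, not the plugin's API):

import requests

_responses = {}  # mirrors the class-level cache in the plugin above

def check_webservice(url, auth=None):
    # One GET per unique URL for the whole session
    if url not in _responses:
        _responses[url] = requests.get(url, auth=auth, timeout=10)
    response = _responses[url]
    if response.status_code == 401:
        raise RuntimeError("Authentication failed - check Site Settings")
    response.raise_for_status()
    if not response.text.startswith("Deadline Web Service "):
        raise RuntimeError("Did not reach a Deadline Web Service")

# check_webservice("http://deadline-host:8082", auth=("user", "pass"))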
@ -1,84 +0,0 @@
import pyblish.api

from ayon_core.pipeline import (
    PublishXmlValidationError,
    OptionalPyblishPluginMixin
)


class ValidateDeadlinePools(OptionalPyblishPluginMixin,
                            pyblish.api.InstancePlugin):
    """Validate primaryPool and secondaryPool on instance.

    Values are set on the instance when creating it or by Settings in
    CollectDeadlinePools.
    """

    label = "Validate Deadline Pools"
    order = pyblish.api.ValidatorOrder
    families = ["rendering",
                "render.farm",
                "render.frames_farm",
                "renderFarm",
                "renderlayer",
                "maxrender",
                "publish.hou"]
    optional = True

    # cache
    pools_per_url = {}

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        deadline_url = instance.data["deadline"]["url"]
        addons_manager = instance.context.data["ayonAddonsManager"]
        deadline_addon = addons_manager["deadline"]
        pools = self.get_pools(
            deadline_addon,
            deadline_url,
            instance.data["deadline"].get("auth")
        )

        invalid_pools = {}
        primary_pool = instance.data.get("primaryPool")
        if primary_pool and primary_pool not in pools:
            invalid_pools["primary"] = primary_pool

        secondary_pool = instance.data.get("secondaryPool")
        if secondary_pool and secondary_pool not in pools:
            invalid_pools["secondary"] = secondary_pool

        if invalid_pools:
            message = "\n".join(
                "{} pool '{}' not available on Deadline".format(key.title(),
                                                                pool)
                for key, pool in invalid_pools.items()
            )
            raise PublishXmlValidationError(
                plugin=self,
                message=message,
                formatting_data={"pools_str": ", ".join(pools)}
            )

    def get_pools(self, deadline_addon, deadline_url, auth):
        if deadline_url not in self.pools_per_url:
            self.log.debug(
                "Querying available pools for Deadline url: {}".format(
                    deadline_url)
            )
            pools = deadline_addon.get_deadline_pools(
                deadline_url, auth=auth, log=self.log
            )
            # some DL return "none" as a pool name
            if "none" not in pools:
                pools.append("none")
            self.log.info("Available pools: {}".format(pools))
            self.pools_per_url[deadline_url] = pools

        return self.pools_per_url[deadline_url]
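A stripped-down sketch of the same pool check outside of pyblish (the dict layout and function name below are illustrative only):

def validate_pools(instance_data, available_pools):
    # Mirrors the check above: collect pool names Deadline does not know
    invalid = {
        key: instance_data[key]
        for key in ("primaryPool", "secondaryPool")
        if instance_data.get(key) and instance_data[key] not in available_pools
    }
    if invalid:
        raise ValueError("; ".join(
            "{} '{}' not available on Deadline".format(key, pool)
            for key, pool in invalid.items()
        ))

validate_pools({"primaryPool": "gpu"}, ["none", "cpu", "gpu"])  # passes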
@ -1,256 +0,0 @@
import os
import requests

import pyblish.api

from ayon_core.lib import collect_frames
from ayon_deadline.abstract_submit_deadline import requests_get


class ValidateExpectedFiles(pyblish.api.InstancePlugin):
    """Compare rendered and expected files"""

    label = "Validate rendered files from Deadline"
    order = pyblish.api.ValidatorOrder
    families = ["render"]
    targets = ["deadline"]

    # check if actual frame range on render job wasn't different
    # case when the artist wants to render only a subset of frames
    allow_user_override = True

    def process(self, instance):
        """Process all the nodes in the instance"""

        # get dependency job ids for retrieving frame list
        dependent_job_ids = self._get_dependent_job_ids(instance)

        if not dependent_job_ids:
            self.log.warning("No dependent jobs found for instance: {}"
                             "".format(instance))
            return

        # get list of frames from dependent jobs
        frame_list = self._get_dependent_jobs_frames(
            instance, dependent_job_ids)

        for repre in instance.data["representations"]:
            expected_files = self._get_expected_files(repre)

            staging_dir = repre["stagingDir"]
            existing_files = self._get_existing_files(staging_dir)

            if self.allow_user_override:
                # We always check for user override because the user might have
                # also overridden the Job frame list to be longer than the
                # originally submitted frame range
                # todo: We should first check if Job frame range was overridden
                #       at all so we don't unnecessarily override anything
                file_name_template, frame_placeholder = \
                    self._get_file_name_template_and_placeholder(
                        expected_files)

                if not file_name_template:
                    raise RuntimeError("Unable to retrieve file name template "
                                       "from files: {}".format(expected_files))

                job_expected_files = self._get_job_expected_files(
                    file_name_template,
                    frame_placeholder,
                    frame_list)

                job_files_diff = job_expected_files.difference(expected_files)
                if job_files_diff:
                    self.log.debug(
                        "Detected difference in expected output files from "
                        "Deadline job. Assuming an updated frame list by the "
                        "user. Difference: {}".format(sorted(job_files_diff))
                    )

                    # Update the representation expected files
                    self.log.info("Update range from actual job range "
                                  "to frame list: {}".format(frame_list))
                    # single item files must be string not list
                    repre["files"] = (sorted(job_expected_files)
                                      if len(job_expected_files) > 1 else
                                      list(job_expected_files)[0])

                    # Update the expected files
                    expected_files = job_expected_files

            # We don't use set.difference because we do allow other existing
            # files to be in the folder that we might not want to use.
            missing = expected_files - existing_files
            if missing:
                raise RuntimeError(
                    "Missing expected files: {}\n"
                    "Expected files: {}\n"
                    "Existing files: {}".format(
                        sorted(missing),
                        sorted(expected_files),
                        sorted(existing_files)
                    )
                )

    def _get_dependent_job_ids(self, instance):
        """Returns list of dependent job ids from instance metadata.json

        Args:
            instance (pyblish.api.Instance): pyblish instance

        Returns:
            (list): list of dependent job ids

        """
        dependent_job_ids = []

        # job_id collected from metadata.json
        original_job_id = instance.data["render_job_id"]

        dependent_job_ids_env = os.environ.get("RENDER_JOB_IDS")
        if dependent_job_ids_env:
            dependent_job_ids = dependent_job_ids_env.split(',')
        elif original_job_id:
            dependent_job_ids = [original_job_id]

        return dependent_job_ids

    def _get_dependent_jobs_frames(self, instance, dependent_job_ids):
        """Returns list of frame ranges from all render jobs.

        Render job might be re-submitted so job_id in metadata.json could be
        invalid. GlobalJobPreload injects current job id to RENDER_JOB_IDS.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            dependent_job_ids (list): list of dependent job ids
        Returns:
            (list)
        """
        all_frame_lists = []

        for job_id in dependent_job_ids:
            job_info = self._get_job_info(instance, job_id)
            frame_list = job_info["Props"].get("Frames")
            if frame_list:
                all_frame_lists.extend(frame_list.split(','))

        return all_frame_lists

    def _get_job_expected_files(self,
                                file_name_template,
                                frame_placeholder,
                                frame_list):
        """Calculates list of names of expected rendered files.

        Might be different from expected files from submission if user
        explicitly and manually changed the frame list on the Deadline job.

        """
        # no frames in file name at all, eg 'renderCompositingMain.withLut.mov'
        if not frame_placeholder:
            return {file_name_template}

        real_expected_rendered = set()
        src_padding_exp = "%0{}d".format(len(frame_placeholder))
        for frames in frame_list:
            if '-' not in frames:  # single frame
                frames = "{}-{}".format(frames, frames)

            start, end = frames.split('-')
            for frame in range(int(start), int(end) + 1):
                ren_name = file_name_template.replace(
                    frame_placeholder, src_padding_exp % frame)
                real_expected_rendered.add(ren_name)

        return real_expected_rendered

    def _get_file_name_template_and_placeholder(self, files):
        """Returns file name with frame replaced with # and this placeholder"""
        sources_and_frames = collect_frames(files)

        file_name_template = frame_placeholder = None
        for file_name, frame in sources_and_frames.items():

            # There might be cases where clique was unable to collect
            # collections in `collect_frames` - thus we capture that case
            if frame is not None:
                frame_placeholder = "#" * len(frame)

                file_name_template = os.path.basename(
                    file_name.replace(frame, frame_placeholder))
            else:
                file_name_template = file_name
            break

        return file_name_template, frame_placeholder

    def _get_job_info(self, instance, job_id):
        """Calls DL for actual job info for 'job_id'

        Might be different than job info saved in metadata.json if user
        manually changes job pre/during rendering.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            job_id (str): Deadline job id

        Returns:
            (dict): Job info from Deadline

        """
        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
        try:
            kwargs = {}
            auth = instance.data["deadline"]["auth"]
            if auth:
                kwargs["auth"] = auth
            response = requests_get(url, **kwargs)
        except requests.exceptions.ConnectionError:
            self.log.error("Deadline is not accessible at "
                           "{}".format(deadline_url))
            return {}

        if not response.ok:
            self.log.error("Submission failed!")
            self.log.error(response.status_code)
            self.log.error(response.content)
            raise RuntimeError(response.text)

        json_content = response.json()
        if json_content:
            return json_content.pop()
        return {}

    def _get_existing_files(self, staging_dir):
        """Returns set of existing file names from 'staging_dir'"""
        existing_files = set()
        for file_name in os.listdir(staging_dir):
            existing_files.add(file_name)
        return existing_files

    def _get_expected_files(self, repre):
        """Returns set of file names in representation['files']

        The representations are collected from `CollectRenderedFiles` using
        the metadata.json file submitted along with the render job.

        Args:
            repre (dict): The representation containing 'files'

        Returns:
            set: Set of expected file_names in the staging directory.

        """
        expected_files = set()

        files = repre["files"]
        if not isinstance(files, list):
            files = [files]

        for file_name in files:
            expected_files.add(file_name)
        return expected_files
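The frame expansion done in `_get_job_expected_files` above can be exercised standalone; a minimal sketch with hypothetical names:

def expand_frames(file_name_template, frame_placeholder, frame_list):
    # "file.####.exr" + ["1-2", "7"] -> {"file.0001.exr", ...}
    if not frame_placeholder:
        return {file_name_template}
    padding = "%0{}d".format(len(frame_placeholder))
    expected = set()
    for frames in frame_list:
        if "-" not in frames:  # single frame
            frames = "{0}-{0}".format(frames)
        start, end = frames.split("-")
        for frame in range(int(start), int(end) + 1):
            expected.add(
                file_name_template.replace(frame_placeholder, padding % frame))
    return expected

print(sorted(expand_frames("file.####.exr", "####", ["1-2", "7"])))
# ['file.0001.exr', 'file.0002.exr', 'file.0007.exr']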
Binary file not shown.
Before Width: | Height: | Size: 7.5 KiB
@ -1,9 +0,0 @@
[Arguments]
Type=string
Label=Arguments
Category=Python Options
CategoryOrder=0
Index=1
Description=The arguments to pass to the script. If no arguments are required, leave this blank.
Required=false
DisableIfBlank=true
@ -1,35 +0,0 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Ayon Plugin for Deadline
Description=Not configurable

[AyonExecutable]
Type=multilinemultifilename
Label=Ayon Executable
Category=Ayon Executables
CategoryOrder=1
Index=0
Default=
Description=The path to the Ayon executable. Enter alternative paths on separate lines.

[AyonServerUrl]
Type=string
Label=Ayon Server Url
Category=Ayon Credentials
CategoryOrder=2
Index=0
Default=
Description=Url to Ayon server

[AyonApiKey]
Type=password
Label=Ayon API key
Category=Ayon Credentials
CategoryOrder=2
Index=0
Default=
Description=API key for service account on Ayon Server
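Deadline `.param` files like the one above are INI-style, so for quick inspection they can be read with Python's stdlib configparser; a small illustration (not part of the plugin itself):

import configparser

parser = configparser.ConfigParser()
parser.read_string("""
[AyonServerUrl]
Type=string
Label=Ayon Server Url
Default=
""")
print(parser["AyonServerUrl"]["Type"])  # string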
@ -1,159 +0,0 @@
#!/usr/bin/env python3

from System.IO import Path
from System.Text.RegularExpressions import Regex

from Deadline.Plugins import PluginType, DeadlinePlugin
from Deadline.Scripting import (
    StringUtils,
    FileUtils,
    RepositoryUtils
)

import re
import os
import platform

__version__ = "1.0.0"

######################################################################
# This is the function that Deadline calls to get an instance of the
# main DeadlinePlugin class.
######################################################################
def GetDeadlinePlugin():
    return AyonDeadlinePlugin()


def CleanupDeadlinePlugin(deadlinePlugin):
    deadlinePlugin.Cleanup()


class AyonDeadlinePlugin(DeadlinePlugin):
    """
    Standalone plugin for publishing from Ayon

    Calls the Ayon executable 'ayon_console' from the first correctly found
    file based on plugin configuration. Uses 'publish' command and passes
    path to metadata json file, which contains all needed information
    for the publish process.
    """
    def __init__(self):
        super().__init__()
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument

    def Cleanup(self):
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback

    def InitializeProcess(self):
        self.LogInfo(
            "Initializing process with AYON plugin {}".format(__version__)
        )
        self.PluginType = PluginType.Simple
        self.StdoutHandling = True

        self.SingleFramesOnly = self.GetBooleanPluginInfoEntryWithDefault(
            "SingleFramesOnly", False)
        self.LogInfo("Single Frames Only: %s" % self.SingleFramesOnly)

        self.AddStdoutHandlerCallback(
            r".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress

    def RenderExecutable(self):
        job = self.GetJob()

        # set required env vars for Ayon
        # cannot be in InitializeProcess as it is too soon
        config = RepositoryUtils.GetPluginConfig("Ayon")
        ayon_server_url = (
            job.GetJobEnvironmentKeyValue("AYON_SERVER_URL") or
            config.GetConfigEntryWithDefault("AyonServerUrl", "")
        )
        ayon_api_key = (
            job.GetJobEnvironmentKeyValue("AYON_API_KEY") or
            config.GetConfigEntryWithDefault("AyonApiKey", "")
        )
        ayon_bundle_name = job.GetJobEnvironmentKeyValue("AYON_BUNDLE_NAME")

        environment = {
            "AYON_SERVER_URL": ayon_server_url,
            "AYON_API_KEY": ayon_api_key,
            "AYON_BUNDLE_NAME": ayon_bundle_name,
        }

        for env, val in environment.items():
            self.SetEnvironmentVariable(env, val)

        exe_list = self.GetConfigEntry("AyonExecutable")
        # clean '\ ' for MacOS pasting
        if platform.system().lower() == "darwin":
            exe_list = exe_list.replace("\\ ", " ")

        expanded_paths = []
        for path in exe_list.split(";"):
            if path.startswith("~"):
                path = os.path.expanduser(path)
            expanded_paths.append(path)
        exe = FileUtils.SearchFileList(";".join(expanded_paths))

        if exe == "":
            self.FailRender(
                "Ayon executable was not found in the semicolon separated "
                "list: \"{}\". The path to the render executable can be "
                "configured from the Plugin Configuration in the Deadline "
                "Monitor.".format(exe_list)
            )
        return exe

    def RenderArgument(self):
        arguments = str(self.GetPluginInfoEntryWithDefault("Arguments", ""))
        arguments = RepositoryUtils.CheckPathMapping(arguments)

        arguments = re.sub(r"(?i)<STARTFRAME>", str(self.GetStartFrame()),
                           arguments)
        arguments = re.sub(r"(?i)<ENDFRAME>", str(self.GetEndFrame()),
                           arguments)
        arguments = re.sub(r"(?i)<QUOTE>", "\"", arguments)

        arguments = self.ReplacePaddedFrame(arguments,
                                            "<(?i)STARTFRAME%([0-9]+)>",
                                            self.GetStartFrame())
        arguments = self.ReplacePaddedFrame(arguments,
                                            "<(?i)ENDFRAME%([0-9]+)>",
                                            self.GetEndFrame())

        count = 0
        for filename in self.GetAuxiliaryFilenames():
            localAuxFile = Path.Combine(self.GetJobsDataDirectory(), filename)
            arguments = re.sub(r"(?i)<AUXFILE" + str(count) + r">",
                               localAuxFile.replace("\\", "/"), arguments)
            count += 1

        return arguments

    def ReplacePaddedFrame(self, arguments, pattern, frame):
        frameRegex = Regex(pattern)
        while True:
            frameMatch = frameRegex.Match(arguments)
            if not frameMatch.Success:
                break
            paddingSize = int(frameMatch.Groups[1].Value)
            if paddingSize > 0:
                padding = StringUtils.ToZeroPaddedString(
                    frame, paddingSize, False)
            else:
                padding = str(frame)
            arguments = arguments.replace(
                frameMatch.Groups[0].Value, padding)

        return arguments

    def HandleProgress(self):
        progress = float(self.GetRegexMatch(1))
        self.SetProgress(progress)
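`ReplacePaddedFrame` above relies on the .NET `Regex` class available through Python.NET inside Deadline. A pure-Python equivalent using `re`, offered here only as a hedged sketch of the same `<TOKEN%N>` substitution:

import re

def replace_padded_frame(arguments, token, frame):
    # "<STARTFRAME%4>" -> "0007"; plain str(frame) when padding is 0
    pattern = re.compile(r"(?i)<{}%(\d+)>".format(token))

    def _sub(match):
        padding = int(match.group(1))
        return str(frame).zfill(padding) if padding > 0 else str(frame)

    return pattern.sub(_sub, arguments)

print(replace_padded_frame("render -f <STARTFRAME%4>", "STARTFRAME", 7))
# render -f 0007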
Binary file not shown.
Before Width: | Height: | Size: 101 KiB
@ -1,38 +0,0 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Celaction Plugin for Deadline
Description=Not configurable

[ConcurrentTasks]
Type=label
Label=ConcurrentTasks
Category=About Plugin
CategoryOrder=-1
Index=0
Default=True
Description=Not configurable

[Executable]
Type=filename
Label=Executable
Category=Config
CategoryOrder=0
CategoryIndex=0
Description=The command executable to run
Required=false
DisableIfBlank=true

[RenderNameSeparator]
Type=string
Label=RenderNameSeparator
Category=Config
CategoryOrder=0
CategoryIndex=1
Description=The separator to use for naming
Required=false
DisableIfBlank=true
Default=.
@ -1,122 +0,0 @@
from System.Text.RegularExpressions import *

from Deadline.Plugins import *
from Deadline.Scripting import *

import _winreg

######################################################################
# This is the function that Deadline calls to get an instance of the
# main DeadlinePlugin class.
######################################################################


def GetDeadlinePlugin():
    return CelActionPlugin()


def CleanupDeadlinePlugin(deadlinePlugin):
    deadlinePlugin.Cleanup()

######################################################################
# This is the main DeadlinePlugin class for the CelAction plugin.
######################################################################


class CelActionPlugin(DeadlinePlugin):

    def __init__(self):
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument
        self.StartupDirectoryCallback += self.StartupDirectory

    def Cleanup(self):
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback
        del self.StartupDirectoryCallback

    def GetCelActionRegistryKey(self):
        # Modify registry for frame separation
        path = r'Software\CelAction\CelAction2D\User Settings'
        _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, path)
        regKey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0,
                                 _winreg.KEY_ALL_ACCESS)
        return regKey

    def GetSeparatorValue(self, regKey):
        useSeparator, _ = _winreg.QueryValueEx(
            regKey, 'RenderNameUseSeparator')
        separator, _ = _winreg.QueryValueEx(regKey, 'RenderNameSeparator')

        return useSeparator, separator

    def SetSeparatorValue(self, regKey, useSeparator, separator):
        _winreg.SetValueEx(regKey, 'RenderNameUseSeparator',
                           0, _winreg.REG_DWORD, useSeparator)
        _winreg.SetValueEx(regKey, 'RenderNameSeparator',
                           0, _winreg.REG_SZ, separator)

    def InitializeProcess(self):
        # Set the plugin specific settings.
        self.SingleFramesOnly = False

        # Set the process specific settings.
        self.StdoutHandling = True
        self.PopupHandling = True

        # Ignore 'celaction' Pop-up dialog
        self.AddPopupIgnorer(".*Rendering.*")
        self.AddPopupIgnorer(".*AutoRender.*")

        # Ignore 'celaction' Pop-up dialog
        self.AddPopupIgnorer(".*Wait.*")

        # Ignore 'celaction' Pop-up dialog
        self.AddPopupIgnorer(".*Timeline Scrub.*")

        celActionRegKey = self.GetCelActionRegistryKey()

        self.SetSeparatorValue(celActionRegKey, 1, self.GetConfigEntryWithDefault(
            "RenderNameSeparator", ".").strip())

    def RenderExecutable(self):
        return RepositoryUtils.CheckPathMapping(self.GetConfigEntry("Executable").strip())

    def RenderArgument(self):
        arguments = RepositoryUtils.CheckPathMapping(
            self.GetPluginInfoEntry("Arguments").strip())
        arguments = arguments.replace(
            "<STARTFRAME>", str(self.GetStartFrame()))
        arguments = arguments.replace("<ENDFRAME>", str(self.GetEndFrame()))
        arguments = self.ReplacePaddedFrame(
            arguments, "<STARTFRAME%([0-9]+)>", self.GetStartFrame())
        arguments = self.ReplacePaddedFrame(
            arguments, "<ENDFRAME%([0-9]+)>", self.GetEndFrame())
        arguments = arguments.replace("<QUOTE>", "\"")
        return arguments

    def StartupDirectory(self):
        return self.GetPluginInfoEntryWithDefault("StartupDirectory", "").strip()

    def ReplacePaddedFrame(self, arguments, pattern, frame):
        frameRegex = Regex(pattern)
        while True:
            frameMatch = frameRegex.Match(arguments)
            if frameMatch.Success:
                paddingSize = int(frameMatch.Groups[1].Value)
                if paddingSize > 0:
                    padding = StringUtils.ToZeroPaddedString(
                        frame, paddingSize, False)
                else:
                    padding = str(frame)
                arguments = arguments.replace(
                    frameMatch.Groups[0].Value, padding)
            else:
                break

        return arguments
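The registry round-trip in `GetCelActionRegistryKey`/`SetSeparatorValue` above uses the Python 2 `_winreg` module. The same steps with the Python 3 `winreg` module would look roughly like this (Windows-only sketch):

import winreg

path = r"Software\CelAction\CelAction2D\User Settings"
# Ensure the key exists, then open it for reading and writing
winreg.CreateKey(winreg.HKEY_CURRENT_USER, path)
with winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0,
                    winreg.KEY_ALL_ACCESS) as key:
    winreg.SetValueEx(key, "RenderNameUseSeparator", 0, winreg.REG_DWORD, 1)
    winreg.SetValueEx(key, "RenderNameSeparator", 0, winreg.REG_SZ, ".")
    separator, _ = winreg.QueryValueEx(key, "RenderNameSeparator")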
@ -1,662 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import tempfile
from datetime import datetime
import subprocess
import json
import platform
import uuid
import re
from Deadline.Scripting import (
    RepositoryUtils,
    FileUtils,
    DirectoryUtils,
)
__version__ = "1.1.1"
VERSION_REGEX = re.compile(
    r"(?P<major>0|[1-9]\d*)"
    r"\.(?P<minor>0|[1-9]\d*)"
    r"\.(?P<patch>0|[1-9]\d*)"
    r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?"
    r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?"
)


class OpenPypeVersion:
    """Fake semver version class for OpenPype version purposes."""

    def __init__(self, major, minor, patch, prerelease, origin=None):
        self.major = major
        self.minor = minor
        self.patch = patch
        self.prerelease = prerelease

        is_valid = True
        if major is None or minor is None or patch is None:
            is_valid = False
        self.is_valid = is_valid

        if origin is None:
            base = "{}.{}.{}".format(str(major), str(minor), str(patch))
            if not prerelease:
                origin = base
            else:
                origin = "{}-{}".format(base, str(prerelease))

        self.origin = origin

    @classmethod
    def from_string(cls, version):
        """Create an object of version from string.

        Args:
            version (str): Version as a string.

        Returns:
            Union[OpenPypeVersion, None]: Version object if input is nonempty
                string otherwise None.
        """

        if not version:
            return None
        valid_parts = VERSION_REGEX.findall(version)
        if len(valid_parts) != 1:
            # Return invalid version with filled 'origin' attribute
            return cls(None, None, None, None, origin=str(version))

        # Unpack found version
        major, minor, patch, pre, post = valid_parts[0]
        prerelease = pre
        # Post release is not important anymore and should be considered as
        # part of prerelease
        # - comparison is implemented to find suitable build and builds should
        #   never contain prerelease part so "not proper" parsing is
        #   acceptable for this use case.
        if post:
            prerelease = "{}+{}".format(pre, post)

        return cls(
            int(major), int(minor), int(patch), prerelease, origin=version
        )

    def has_compatible_release(self, other):
        """Version has compatible release as other version.

        Both major and minor versions must be exactly the same. In that case
        a build can be considered as release compatible with any version.

        Args:
            other (OpenPypeVersion): Other version.

        Returns:
            bool: Version is release compatible with other version.
        """

        if self.is_valid and other.is_valid:
            return self.major == other.major and self.minor == other.minor
        return False

    def __bool__(self):
        return self.is_valid

    def __repr__(self):
        return "<{} {}>".format(self.__class__.__name__, self.origin)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return self.origin == other
        return self.origin == other.origin

    def __lt__(self, other):
        if not isinstance(other, self.__class__):
            return None

        if not self.is_valid:
            return True

        if not other.is_valid:
            return False

        if self.origin == other.origin:
            return None

        same_major = self.major == other.major
        if not same_major:
            return self.major < other.major

        same_minor = self.minor == other.minor
        if not same_minor:
            return self.minor < other.minor

        same_patch = self.patch == other.patch
        if not same_patch:
            return self.patch < other.patch

        if not self.prerelease:
            return False

        if not other.prerelease:
            return True

        pres = [self.prerelease, other.prerelease]
        pres.sort()
        return pres[0] == self.prerelease


def get_openpype_version_from_path(path, build=True):
    """Get OpenPype version from provided path.

    Args:
        path (str): Path to scan.
        build (bool, optional): Get only builds, not sources.

    Returns:
        Union[OpenPypeVersion, None]: version of OpenPype if found.
    """

    # fix path for application bundle on macos
    if platform.system().lower() == "darwin":
        path = os.path.join(path, "MacOS")

    version_file = os.path.join(path, "openpype", "version.py")
    if not os.path.isfile(version_file):
        return None

    # skip if the version is not build
    exe = os.path.join(path, "openpype_console.exe")
    if platform.system().lower() in ["linux", "darwin"]:
        exe = os.path.join(path, "openpype_console")

    # if only builds are requested
    if build and not os.path.isfile(exe):  # noqa: E501
        print(" ! path is not a build: {}".format(path))
        return None

    version = {}
    with open(version_file, "r") as vf:
        exec(vf.read(), version)

    version_str = version.get("__version__")
    if version_str:
        return OpenPypeVersion.from_string(version_str)
    return None


def get_openpype_executable():
    """Return OpenPype Executable from Event Plug-in Settings"""
    config = RepositoryUtils.GetPluginConfig("OpenPype")
    exe_list = config.GetConfigEntryWithDefault("OpenPypeExecutable", "")
    dir_list = config.GetConfigEntryWithDefault(
        "OpenPypeInstallationDirs", "")

    # clean '\ ' for MacOS pasting
    if platform.system().lower() == "darwin":
        exe_list = exe_list.replace("\\ ", " ")
        dir_list = dir_list.replace("\\ ", " ")
    return exe_list, dir_list


def get_openpype_versions(dir_list):
    print(">>> Getting OpenPype executable ...")
    openpype_versions = []

    # special case of multiple install dirs
    for dir_list in dir_list.split(","):
        install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
        if install_dir:
            print("--- Looking for OpenPype at: {}".format(install_dir))
            sub_dirs = [
                f.path for f in os.scandir(install_dir)
                if f.is_dir()
            ]
            for subdir in sub_dirs:
                version = get_openpype_version_from_path(subdir)
                if not version:
                    continue
                print(" - found: {} - {}".format(version, subdir))
                openpype_versions.append((version, subdir))
    return openpype_versions


def get_requested_openpype_executable(
    exe, dir_list, requested_version
):
    requested_version_obj = OpenPypeVersion.from_string(requested_version)
    if not requested_version_obj:
        print((
            ">>> Requested version '{}' does not match version regex '{}'"
        ).format(requested_version, VERSION_REGEX))
        return None

    print((
        ">>> Scanning for compatible requested version {}"
    ).format(requested_version))
    openpype_versions = get_openpype_versions(dir_list)
    if not openpype_versions:
        return None

    # if looking for requested compatible version,
    # add the implicitly specified to the list too.
    if exe:
        exe_dir = os.path.dirname(exe)
        print("Looking for OpenPype at: {}".format(exe_dir))
        version = get_openpype_version_from_path(exe_dir)
        if version:
            print(" - found: {} - {}".format(version, exe_dir))
            openpype_versions.append((version, exe_dir))

    matching_item = None
    compatible_versions = []
    for version_item in openpype_versions:
        version, version_dir = version_item
        if requested_version_obj.has_compatible_release(version):
            compatible_versions.append(version_item)
            if version == requested_version_obj:
                # Store version item if version matches exactly
                # - break when the matching version is found
                matching_item = version_item
                break

    if not compatible_versions:
        return None

    compatible_versions.sort(key=lambda item: item[0])
    if matching_item:
        version, version_dir = matching_item
        print((
            "*** Found exact match build version {} in {}"
        ).format(version, version_dir))
    else:
        version, version_dir = compatible_versions[-1]

        print((
            "*** Latest compatible version found is {} in {}"
        ).format(version, version_dir))

    # create list of executables for different platform and let
    # Deadline decide.
    exe_list = [
        os.path.join(version_dir, "openpype_console.exe"),
        os.path.join(version_dir, "openpype_console"),
        os.path.join(version_dir, "MacOS", "openpype_console")
    ]
    return FileUtils.SearchFileList(";".join(exe_list))


def inject_openpype_environment(deadlinePlugin):
    """ Pull env vars from OpenPype and push them to rendering process.

    Used for correct paths, configuration from OpenPype etc.
    """
    job = deadlinePlugin.GetJob()

    print(">>> Injecting OpenPype environments ...")
    try:
        exe_list, dir_list = get_openpype_executable()
        exe = FileUtils.SearchFileList(exe_list)

        requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
        if requested_version:
            exe = get_requested_openpype_executable(
                exe, dir_list, requested_version
            )
            if exe is None:
                raise RuntimeError((
                    "Cannot find compatible version available for version {}"
                    " requested by the job. Please add it through plugin"
                    " configuration in Deadline or install it to configured"
                    " directory."
                ).format(requested_version))

        if not exe:
            raise RuntimeError((
                "OpenPype executable was not found in the semicolon "
                "separated list \"{}\". "
                "The path to the render executable can be configured"
                " from the Plugin Configuration in the Deadline Monitor."
            ).format(exe_list))

        print("--- OpenPype executable: {}".format(exe))

        # tempfile.TemporaryFile cannot be used because of locking
        temp_file_name = "{}_{}.json".format(
            datetime.utcnow().strftime("%Y%m%d%H%M%S%f"),
            str(uuid.uuid1())
        )
        export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
        print(">>> Temporary path: {}".format(export_url))

        args = [
            "--headless",
            "extractenvironments",
            export_url
        ]

        add_kwargs = {
            "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"),
            "asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"),
            "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"),
            "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"),
            "envgroup": "farm"
        }

        # use legacy IS_TEST env var to mark automatic tests for OP
        if job.GetJobEnvironmentKeyValue("IS_TEST"):
            args.append("--automatic-tests")

        if all(add_kwargs.values()):
            for key, value in add_kwargs.items():
                args.extend(["--{}".format(key), value])
        else:
            raise RuntimeError((
                "Missing required env vars: AVALON_PROJECT, AVALON_ASSET,"
                " AVALON_TASK, AVALON_APP_NAME"
            ))

        openpype_mongo = job.GetJobEnvironmentKeyValue("OPENPYPE_MONGO")
        if openpype_mongo:
            # inject env var for OP extractenvironments
            # SetEnvironmentVariable is important, not SetProcessEnv...
            deadlinePlugin.SetEnvironmentVariable("OPENPYPE_MONGO",
                                                  openpype_mongo)

        if not os.environ.get("OPENPYPE_MONGO"):
            print(">>> Missing OPENPYPE_MONGO env var, process won't work")

        os.environ["AVALON_TIMEOUT"] = "5000"

        args_str = subprocess.list2cmdline(args)
        print(">>> Executing: {} {}".format(exe, args_str))
        process_exitcode = deadlinePlugin.RunProcess(
            exe, args_str, os.path.dirname(exe), -1
        )

        if process_exitcode != 0:
            raise RuntimeError(
                "Failed to run OpenPype process to extract environments."
            )

        print(">>> Loading file ...")
        with open(export_url) as fp:
            contents = json.load(fp)

        for key, value in contents.items():
            deadlinePlugin.SetProcessEnvironmentVariable(key, value)

        if "PATH" in contents:
            # Set os.environ[PATH] so studio settings' path entries
            # can be used to define search path for executables.
            print(f">>> Setting 'PATH' Environment to: {contents['PATH']}")
            os.environ["PATH"] = contents["PATH"]

        script_url = job.GetJobPluginInfoKeyValue("ScriptFilename")
        if script_url:
            script_url = script_url.format(**contents).replace("\\", "/")
            print(">>> Setting script path {}".format(script_url))
            job.SetJobPluginInfoKeyValue("ScriptFilename", script_url)

        print(">>> Removing temporary file")
        os.remove(export_url)

        print(">> Injection end.")
    except Exception as e:
        if hasattr(e, "output"):
            print(">>> Exception {}".format(e.output))
        import traceback
        print(traceback.format_exc())
        print("!!! Injection failed.")
        RepositoryUtils.FailJob(job)
        raise


def inject_ayon_environment(deadlinePlugin):
    """ Pull env vars from AYON and push them to rendering process.

    Used for correct paths, configuration from AYON etc.
    """
    job = deadlinePlugin.GetJob()

    print(">>> Injecting AYON environments ...")
    try:
        exe_list = get_ayon_executable()
        exe = FileUtils.SearchFileList(exe_list)

        if not exe:
            raise RuntimeError((
                "Ayon executable was not found in the semicolon "
                "separated list \"{}\". "
                "The path to the render executable can be configured"
                " from the Plugin Configuration in the Deadline Monitor."
            ).format(exe_list))

        print("--- Ayon executable: {}".format(exe))

        ayon_bundle_name = job.GetJobEnvironmentKeyValue("AYON_BUNDLE_NAME")
        if not ayon_bundle_name:
            raise RuntimeError(
                "Missing env var in job properties AYON_BUNDLE_NAME"
            )

        config = RepositoryUtils.GetPluginConfig("Ayon")
        ayon_server_url = (
            job.GetJobEnvironmentKeyValue("AYON_SERVER_URL") or
            config.GetConfigEntryWithDefault("AyonServerUrl", "")
        )
        ayon_api_key = (
            job.GetJobEnvironmentKeyValue("AYON_API_KEY") or
            config.GetConfigEntryWithDefault("AyonApiKey", "")
        )

        if not all([ayon_server_url, ayon_api_key]):
            raise RuntimeError((
                "Missing required values for server url and api key. "
                "Please fill in the Ayon Deadline plugin or provide them via "
                "AYON_SERVER_URL and AYON_API_KEY"
            ))

        # tempfile.TemporaryFile cannot be used because of locking
        temp_file_name = "{}_{}.json".format(
            datetime.utcnow().strftime("%Y%m%d%H%M%S%f"),
            str(uuid.uuid1())
        )
        export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
        print(">>> Temporary path: {}".format(export_url))

        add_kwargs = {
            "envgroup": "farm",
        }
        # Support backwards compatible keys
        for key, env_keys in (
            ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]),
            ("folder", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
            ("task", ["AYON_TASK_NAME", "AVALON_TASK"]),
            ("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]),
        ):
            value = ""
            for env_key in env_keys:
                value = job.GetJobEnvironmentKeyValue(env_key)
                if value:
                    break
            add_kwargs[key] = value

        if not all(add_kwargs.values()):
            raise RuntimeError((
                "Missing required env vars: AYON_PROJECT_NAME,"
                " AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME"
            ))

        # Use applications addon arguments
        # TODO validate if applications addon should be used
        args = [
            "--headless",
            "addon",
            "applications",
            "extractenvironments",
            export_url
        ]
        # Backwards compatibility for older versions
        legacy_args = [
            "--headless",
            "extractenvironments",
            export_url
        ]

        for key, value in add_kwargs.items():
            args.extend(["--{}".format(key), value])
            # Legacy arguments expect '--asset' instead of '--folder'
            if key == "folder":
                key = "asset"
            legacy_args.extend(["--{}".format(key), value])

        environment = {
            "AYON_SERVER_URL": ayon_server_url,
            "AYON_API_KEY": ayon_api_key,
            "AYON_BUNDLE_NAME": ayon_bundle_name,
        }

        automatic_tests = job.GetJobEnvironmentKeyValue("AYON_IN_TESTS")
        if automatic_tests:
            environment["AYON_IN_TESTS"] = automatic_tests
        for env, val in environment.items():
            # Add the env var for the Render Plugin that is about to render
            deadlinePlugin.SetEnvironmentVariable(env, val)
            # Add the env var for current calls to `DeadlinePlugin.RunProcess`
            deadlinePlugin.SetProcessEnvironmentVariable(env, val)

        args_str = subprocess.list2cmdline(args)
        print(">>> Executing: {} {}".format(exe, args_str))
        process_exitcode = deadlinePlugin.RunProcess(
            exe, args_str, os.path.dirname(exe), -1
        )

        if process_exitcode != 0:
            print(
                "Failed to run AYON process to extract environments. Trying"
                " to use legacy arguments."
            )
            legacy_args_str = subprocess.list2cmdline(legacy_args)
            process_exitcode = deadlinePlugin.RunProcess(
                exe, legacy_args_str, os.path.dirname(exe), -1
            )
            if process_exitcode != 0:
                raise RuntimeError(
                    "Failed to run AYON process to extract environments."
                )

        print(">>> Loading file ...")
        with open(export_url) as fp:
            contents = json.load(fp)

        for key, value in contents.items():
            deadlinePlugin.SetProcessEnvironmentVariable(key, value)

        if "PATH" in contents:
            # Set os.environ[PATH] so studio settings' path entries
            # can be used to define search path for executables.
            print(f">>> Setting 'PATH' Environment to: {contents['PATH']}")
            os.environ["PATH"] = contents["PATH"]

        script_url = job.GetJobPluginInfoKeyValue("ScriptFilename")
        if script_url:
            script_url = script_url.format(**contents).replace("\\", "/")
            print(">>> Setting script path {}".format(script_url))
            job.SetJobPluginInfoKeyValue("ScriptFilename", script_url)

        print(">>> Removing temporary file")
        os.remove(export_url)

        print(">> Injection end.")
    except Exception as e:
        if hasattr(e, "output"):
            print(">>> Exception {}".format(e.output))
        import traceback
        print(traceback.format_exc())
        print("!!! Injection failed.")
        RepositoryUtils.FailJob(job)
        raise


def get_ayon_executable():
    """Return AYON Executable from Event Plug-in Settings

    Returns:
        str: Semicolon separated list of AYON executable paths.

    Raises:
        RuntimeError: When no path is configured at all.

    """
    config = RepositoryUtils.GetPluginConfig("Ayon")
    exe_list = config.GetConfigEntryWithDefault("AyonExecutable", "")

    if not exe_list:
        raise RuntimeError(
            "Path to AYON executable not configured. "
            "Please set it in the Ayon Deadline Plugin."
        )

    # clean '\ ' for MacOS pasting
    if platform.system().lower() == "darwin":
        exe_list = exe_list.replace("\\ ", " ")

    # Expand user paths
    expanded_paths = []
    for path in exe_list.split(";"):
        if path.startswith("~"):
            path = os.path.expanduser(path)
        expanded_paths.append(path)
    return ";".join(expanded_paths)


def inject_render_job_id(deadlinePlugin):
    """Inject dependency ids to publish process as env var for validation."""
    print(">>> Injecting render job id ...")
    job = deadlinePlugin.GetJob()

    dependency_ids = job.JobDependencyIDs
    print(">>> Dependency IDs: {}".format(dependency_ids))
    render_job_ids = ",".join(dependency_ids)

    deadlinePlugin.SetProcessEnvironmentVariable(
        "RENDER_JOB_IDS", render_job_ids
    )
    print(">>> Injection end.")


def __main__(deadlinePlugin):
    print("*** GlobalJobPreload {} start ...".format(__version__))
    print(">>> Getting job ...")
    job = deadlinePlugin.GetJob()

    openpype_render_job = job.GetJobEnvironmentKeyValue(
        "OPENPYPE_RENDER_JOB")
    openpype_publish_job = job.GetJobEnvironmentKeyValue(
        "OPENPYPE_PUBLISH_JOB")
    openpype_remote_job = job.GetJobEnvironmentKeyValue(
        "OPENPYPE_REMOTE_PUBLISH")

    if openpype_publish_job == "1" and openpype_render_job == "1":
        raise RuntimeError(
            "Misconfiguration. Job couldn't be both render and publish."
        )

    if openpype_publish_job == "1":
        inject_render_job_id(deadlinePlugin)
    if openpype_render_job == "1" or openpype_remote_job == "1":
        inject_openpype_environment(deadlinePlugin)

    ayon_render_job = job.GetJobEnvironmentKeyValue("AYON_RENDER_JOB")
    ayon_publish_job = job.GetJobEnvironmentKeyValue("AYON_PUBLISH_JOB")
    ayon_remote_job = job.GetJobEnvironmentKeyValue("AYON_REMOTE_PUBLISH")

    if ayon_publish_job == "1" and ayon_render_job == "1":
        raise RuntimeError(
            "Misconfiguration. Job couldn't be both render and publish."
        )

    if ayon_publish_job == "1":
        inject_render_job_id(deadlinePlugin)
    if ayon_render_job == "1" or ayon_remote_job == "1":
        inject_ayon_environment(deadlinePlugin)
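Assuming the OpenPypeVersion class above is available for import, its ordering semantics can be checked with a few examples:

a = OpenPypeVersion.from_string("3.15.2")
b = OpenPypeVersion.from_string("3.15.11")
c = OpenPypeVersion.from_string("3.15.2-nightly.1")

assert a < b                        # patch compared numerically, not as text
assert c < a                        # a prerelease sorts before its release
assert a.has_compatible_release(b)  # same major.minor -> compatible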
Binary file not shown.
Before Width: | Height: | Size: 1.1 KiB
@@ -1,532 +0,0 @@
[SceneFile]
Type=filename
Label=Scene Filename
Category=Global Settings
CategoryOrder=0
Index=0
Description=The scene filename as it exists on the network.
Required=false
DisableIfBlank=true

[Environment]
Type=filename
Label=Scene Environment
Category=Global Settings
CategoryOrder=0
Index=1
Description=The Environment for the scene.
Required=false
DisableIfBlank=true

[Job]
Type=filename
Label=Scene Job
Category=Global Settings
CategoryOrder=0
Index=2
Description=The Job that the scene belongs to.
Required=false
DisableIfBlank=true

[SceneName]
Type=filename
Label=Scene Name
Category=Global Settings
CategoryOrder=0
Index=3
Description=The name of the scene to render.
Required=false
DisableIfBlank=true

[SceneVersion]
Type=filename
Label=Scene Version
Category=Global Settings
CategoryOrder=0
Index=4
Description=The version of the scene to render.
Required=false
DisableIfBlank=true

[Version]
Type=enum
Values=10;11;12
Label=Harmony Version
Category=Global Settings
CategoryOrder=0
Index=5
Description=The version of Harmony to use.
Required=false
DisableIfBlank=true

[IsDatabase]
Type=Boolean
Label=Is Database Scene
Category=Global Settings
CategoryOrder=0
Index=6
Description=Whether or not the scene is in the database.
Required=false
DisableIfBlank=true

[Camera]
Type=string
Label=Camera
Category=Render Settings
CategoryOrder=1
Index=0
Description=Specifies the camera to use for rendering images. If blank, the scene will be rendered with the current camera.
Required=false
DisableIfBlank=true

[UsingResPreset]
Type=Boolean
Label=Use Resolution Preset
Category=Render Settings
CategoryOrder=1
Index=1
Description=Whether or not you are using a resolution preset.
Required=false
DisableIfBlank=true

[ResolutionName]
Type=enum
Values=HDTV_1080p24;HDTV_1080p25;HDTV_720p24;4K_UHD;8K_UHD;DCI_2K;DCI_4K;film-2K;film-4K;film-1.33_H;film-1.66_H;film-1.66_V;Cineon;NTSC;PAL;2160p;1440p;1080p;720p;480p;360p;240p;low;Web_Video;Game_512;Game_512_Ortho;WebCC_Preview;Custom
Label=Resolution Preset
Category=Render Settings
CategoryOrder=1
Index=2
Description=The resolution preset to use.
Required=true
Default=HDTV_1080p24

[PresetName]
Type=string
Label=Preset Name
Category=Render Settings
CategoryOrder=1
Index=3
Description=Specify the custom resolution name.
Required=true
Default=

[ResolutionX]
Type=integer
Label=Resolution X
Minimum=0
Maximum=1000000
Category=Render Settings
CategoryOrder=1
Index=4
Description=Specifies the width of the rendered images. If 0, the current resolution and field of view will be used.
Required=true
Default=1920

[ResolutionY]
Type=integer
Label=Resolution Y
Minimum=0
Maximum=1000000
Category=Render Settings
CategoryOrder=1
Index=5
Description=Specifies the height of the rendered images. If 0, the current resolution and field of view will be used.
Required=true
Default=1080

[FieldOfView]
Type=float
Label=Field Of View
Minimum=0
Maximum=89
DecimalPlaces=2
Category=Render Settings
CategoryOrder=1
Index=6
Description=Specifies the field of view of the rendered images. If 0, the current resolution and field of view will be used.
Required=true
Default=41.11

[Output0Node]
Type=string
Label=Render Node 0 Name
Category=Output Settings
CategoryOrder=2
Index=0
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output0Type]
Type=enum
Values=Image;Movie
Label=Render Node 0 Type
Category=Output Settings
CategoryOrder=2
Index=1
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output0Path]
Type=string
Label=Render Node 0 Path
Category=Output Settings
CategoryOrder=2
Index=2
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output0LeadingZero]
Type=integer
Label=Render Node 0 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=3
Description=The number of leading zeroes for a 1 digit frame number (one less than the full padded length).
Required=false
DisableIfBlank=true

[Output0Format]
Type=string
Label=Render Node 0 Format
Category=Output Settings
CategoryOrder=2
Index=4
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output0StartFrame]
Type=integer
Label=Render Node 0 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=5
Description=The frame that will correspond to frame one when numbering. If this value is not 1, the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output1Node]
Type=string
Label=Render Node 1 Name
Category=Output Settings
CategoryOrder=2
Index=6
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output1Type]
Type=enum
Values=Image;Movie
Label=Render Node 1 Type
Category=Output Settings
CategoryOrder=2
Index=7
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output1Path]
Type=string
Label=Render Node 1 Path
Category=Output Settings
CategoryOrder=2
Index=8
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output1LeadingZero]
Type=integer
Label=Render Node 1 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=9
Description=The number of leading zeroes for a 1 digit frame number (one less than the full padded length).
Required=false
DisableIfBlank=true

[Output1Format]
Type=string
Label=Render Node 1 Format
Category=Output Settings
CategoryOrder=2
Index=10
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output1StartFrame]
Type=integer
Label=Render Node 1 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=11
Description=The frame that will correspond to frame one when numbering. If this value is not 1, the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output2Node]
Type=string
Label=Render Node 2 Name
Category=Output Settings
CategoryOrder=2
Index=12
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output2Type]
Type=enum
Values=Image;Movie
Label=Render Node 2 Type
Category=Output Settings
CategoryOrder=2
Index=13
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output2Path]
Type=string
Label=Render Node 2 Path
Category=Output Settings
CategoryOrder=2
Index=14
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output2LeadingZero]
Type=integer
Label=Render Node 2 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=15
Description=The number of leading zeroes for a 1 digit frame number (one less than the full padded length).
Required=false
DisableIfBlank=true

[Output2Format]
Type=string
Label=Render Node 2 Format
Category=Output Settings
CategoryOrder=2
Index=16
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output2StartFrame]
Type=integer
Label=Render Node 2 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=17
Description=The frame that will correspond to frame one when numbering. If this value is not 1, the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output3Node]
Type=string
Label=Render Node 3 Name
Category=Output Settings
CategoryOrder=2
Index=18
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output3Type]
Type=enum
Values=Image;Movie
Label=Render Node 3 Type
Category=Output Settings
CategoryOrder=2
Index=19
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output3Path]
Type=string
Label=Render Node 3 Path
Category=Output Settings
CategoryOrder=2
Index=20
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output3LeadingZero]
Type=integer
Label=Render Node 3 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=21
Description=The number of leading zeroes for a 1 digit frame number (one less than the full padded length).
Required=false
DisableIfBlank=true

[Output3Format]
Type=string
Label=Render Node 3 Format
Category=Output Settings
CategoryOrder=2
Index=22
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output3StartFrame]
Type=integer
Label=Render Node 3 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=23
Description=The frame that will correspond to frame one when numbering. If this value is not 1, the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output4Node]
Type=string
Label=Render Node 4 Name
Category=Output Settings
CategoryOrder=2
Index=24
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output4Type]
Type=enum
Values=Image;Movie
Label=Render Node 4 Type
Category=Output Settings
CategoryOrder=2
Index=25
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output4Path]
Type=string
Label=Render Node 4 Path
Category=Output Settings
CategoryOrder=2
Index=26
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output4LeadingZero]
Type=integer
Label=Render Node 4 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=27
Description=The number of leading zeroes for a 1 digit frame number (one less than the full padded length).
Required=false
DisableIfBlank=true

[Output4Format]
Type=string
Label=Render Node 4 Format
Category=Output Settings
CategoryOrder=2
Index=28
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output4StartFrame]
Type=integer
Label=Render Node 4 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=29
Description=The frame that will correspond to frame one when numbering. If this value is not 1, the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output5Node]
Type=string
Label=Render Node 5 Name
Category=Output Settings
CategoryOrder=2
Index=30
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output5Type]
Type=enum
Values=Image;Movie
Label=Render Node 5 Type
Category=Output Settings
CategoryOrder=2
Index=31
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output5Path]
Type=string
Label=Render Node 5 Path
Category=Output Settings
CategoryOrder=2
Index=32
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output5LeadingZero]
Type=integer
Label=Render Node 5 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=33
Description=The number of leading zeroes for a 1 digit frame number (one less than the full padded length).
Required=false
DisableIfBlank=true

[Output5Format]
Type=string
Label=Render Node 5 Format
Category=Output Settings
CategoryOrder=2
Index=34
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output5StartFrame]
Type=integer
Label=Render Node 5 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=35
Description=The frame that will correspond to frame one when numbering. If this value is not 1, the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true
@@ -1,98 +0,0 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Harmony Render Plugin for Deadline
Description=Not configurable

[ConcurrentTasks]
Type=label
Label=ConcurrentTasks
Category=About Plugin
CategoryOrder=-1
Index=0
Default=True
Description=Not configurable

[Harmony_RenderExecutable_10]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=0
Label=Harmony 10 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 10.0\win64\bin\Stage.exe

[Harmony_RenderExecutable_11]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=1
Label=Harmony 11 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 11.0\win64\bin\Stage.exe

[Harmony_RenderExecutable_12]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=2
Label=Harmony 12 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 12.0 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 12.0 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_12/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_14]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=3
Label=Harmony 14 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 14.0 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 14.0 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_14/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_15]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=4
Label=Harmony 15 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 15.0 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 15.0 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_15.0/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_17]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=5
Label=Harmony 17 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 17 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 17 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_17/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_20]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=6
Label=Harmony 20 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 20 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 20 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_20/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_21]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=7
Label=Harmony 21 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 21 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 21 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_21/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_22]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=8
Label=Harmony 22 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 22 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 22 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_22/lnx86_64/bin/HarmonyPremium
@@ -1,151 +0,0 @@
#!/usr/bin/env python3
from System import *
from System.Diagnostics import *
from System.IO import *
from System.Text import *

from Deadline.Plugins import *
from Deadline.Scripting import *


def GetDeadlinePlugin():
    return HarmonyAYONPlugin()


def CleanupDeadlinePlugin(deadlinePlugin):
    deadlinePlugin.Cleanup()


class HarmonyAYONPlugin(DeadlinePlugin):

    def __init__(self):
        super().__init__()
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument
        self.CheckExitCodeCallback += self.CheckExitCode

    def Cleanup(self):
        print("Cleanup")
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback

    def CheckExitCode(self, exitCode):
        print("check code")
        if exitCode != 0:
            if exitCode == 100:
                self.LogInfo(
                    "Renderer reported an error with error code 100."
                    " This will be ignored, since the option to ignore it"
                    " is specified in the Job Properties.")
            else:
                self.FailRender(
                    "Renderer returned non-zero error code %d."
                    " Check the renderer's output." % exitCode)

    def InitializeProcess(self):
        self.PluginType = PluginType.Simple
        self.StdoutHandling = True
        self.PopupHandling = True

        self.AddStdoutHandlerCallback(
            "Rendered frame ([0-9]+)"
        ).HandleCallback += self.HandleStdoutProgress

    def HandleStdoutProgress(self):
        startFrame = self.GetStartFrame()
        endFrame = self.GetEndFrame()
        if endFrame - startFrame + 1 != 0:
            self.SetProgress(
                100 * (int(self.GetRegexMatch(1)) - startFrame + 1)
                / (endFrame - startFrame + 1))

    def RenderExecutable(self):
        version = int(self.GetPluginInfoEntry("Version"))
        exeList = self.GetConfigEntry(
            "Harmony_RenderExecutable_" + str(version))
        exe = FileUtils.SearchFileList(exeList)
        if exe == "":
            self.FailRender(
                "Harmony render executable was not found in the configured"
                " semicolon separated list \"" + exeList + "\". The path to"
                " the render executable can be configured from the Plugin"
                " Configuration in the Deadline Monitor.")
        return exe

    def RenderArgument(self):
        renderArguments = "-batch"

        if self.GetBooleanPluginInfoEntryWithDefault("UsingResPreset", False):
            resName = self.GetPluginInfoEntryWithDefault(
                "ResolutionName", "HDTV_1080p24")
            if resName == "Custom":
                renderArguments += " -res " + self.GetPluginInfoEntryWithDefault(
                    "PresetName", "HDTV_1080p24")
            else:
                renderArguments += " -res " + resName
        else:
            resolutionX = self.GetIntegerPluginInfoEntryWithDefault(
                "ResolutionX", -1)
            resolutionY = self.GetIntegerPluginInfoEntryWithDefault(
                "ResolutionY", -1)
            fov = self.GetFloatPluginInfoEntryWithDefault("FieldOfView", -1)

            if resolutionX > 0 and resolutionY > 0 and fov > 0:
                renderArguments += (
                    " -res " + str(resolutionX) + " " + str(resolutionY)
                    + " " + str(fov))

        camera = self.GetPluginInfoEntryWithDefault("Camera", "")

        if camera != "":
            renderArguments += " -camera " + camera

        startFrame = str(self.GetStartFrame())
        endFrame = str(self.GetEndFrame())

        renderArguments += " -frames " + startFrame + " " + endFrame

        if not self.GetBooleanPluginInfoEntryWithDefault("IsDatabase", False):
            sceneFilename = self.GetPluginInfoEntryWithDefault(
                "SceneFile", self.GetDataFilename())
            sceneFilename = RepositoryUtils.CheckPathMapping(sceneFilename)
            renderArguments += " \"" + sceneFilename + "\""
        else:
            environment = self.GetPluginInfoEntryWithDefault("Environment", "")
            renderArguments += " -env " + environment
            job = self.GetPluginInfoEntryWithDefault("Job", "")
            renderArguments += " -job " + job
            scene = self.GetPluginInfoEntryWithDefault("SceneName", "")
            renderArguments += " -scene " + scene
            version = self.GetPluginInfoEntryWithDefault("SceneVersion", "")
            renderArguments += " -version " + version

        # Build a pre-render script that points each configured render node
        # to its output path, padding, format and start frame.
        rendernodeNum = 0
        scriptBuilder = StringBuilder()

        while True:
            nodeName = self.GetPluginInfoEntryWithDefault(
                "Output" + str(rendernodeNum) + "Node", "")
            if nodeName == "":
                break
            nodeType = self.GetPluginInfoEntryWithDefault(
                "Output" + str(rendernodeNum) + "Type", "Image")
            if nodeType == "Image":
                nodePath = self.GetPluginInfoEntryWithDefault(
                    "Output" + str(rendernodeNum) + "Path", "")
                nodeLeadingZero = self.GetPluginInfoEntryWithDefault(
                    "Output" + str(rendernodeNum) + "LeadingZero", "")
                nodeFormat = self.GetPluginInfoEntryWithDefault(
                    "Output" + str(rendernodeNum) + "Format", "")
                nodeStartFrame = self.GetPluginInfoEntryWithDefault(
                    "Output" + str(rendernodeNum) + "StartFrame", "")

                if nodePath != "":
                    scriptBuilder.AppendLine(
                        "node.setTextAttr( \"" + nodeName
                        + "\", \"drawingName\", 1, \"" + nodePath + "\" );")

                if nodeLeadingZero != "":
                    scriptBuilder.AppendLine(
                        "node.setTextAttr( \"" + nodeName
                        + "\", \"leadingZeros\", 1, \""
                        + nodeLeadingZero + "\" );")

                if nodeFormat != "":
                    scriptBuilder.AppendLine(
                        "node.setTextAttr( \"" + nodeName
                        + "\", \"drawingType\", 1, \"" + nodeFormat + "\" );")

                if nodeStartFrame != "":
                    scriptBuilder.AppendLine(
                        "node.setTextAttr( \"" + nodeName
                        + "\", \"start\", 1, \"" + nodeStartFrame + "\" );")

            if nodeType == "Movie":
                nodePath = self.GetPluginInfoEntryWithDefault(
                    "Output" + str(rendernodeNum) + "Path", "")
                if nodePath != "":
                    scriptBuilder.AppendLine(
                        "node.setTextAttr( \"" + nodeName
                        + "\", \"moviePath\", 1, \"" + nodePath + "\" );")

            rendernodeNum += 1

        tempDirectory = self.CreateTempDirectory(
            "thread" + str(self.GetThreadNumber()))
        preRenderScriptName = Path.Combine(tempDirectory, "preRenderScript.txt")

        File.WriteAllText(preRenderScriptName, scriptBuilder.ToString())

        preRenderInlineScript = self.GetPluginInfoEntryWithDefault(
            "PreRenderInlineScript", "")
        if preRenderInlineScript:
            renderArguments += (
                " -preRenderInlineScript \"" + preRenderInlineScript + "\"")

        renderArguments += " -preRenderScript \"" + preRenderScriptName + "\""

        return renderArguments
Binary file not shown (deleted image, 124 KiB).
@@ -1,35 +0,0 @@
[OIIOToolPath]
Type=filename
Label=OIIO Tool location
Category=OIIO
Index=0
Description=OIIO Tool executable to use.
Required=false
DisableIfBlank=true

[OutputFile]
Type=filenamesave
Label=Output File
Category=Output
Index=0
Description=The output filename as it exists on the network.
Required=false
DisableIfBlank=true

[CleanupTiles]
Type=boolean
Category=Options
Index=0
Label=Cleanup Tiles
Required=false
DisableIfBlank=true
Description=If enabled, the OpenPype Tile Assembler will clean up all tiles after assembly.

[Renderer]
Type=string
Label=Renderer
Category=Quicktime Info
Index=0
Description=Renderer name.
Required=false
DisableIfBlank=true
@@ -1,17 +0,0 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=OpenPype Tile Assembler Plugin for Deadline
Description=Not configurable

[OIIOTool_RenderExecutable]
Type=multilinemultifilename
Label=OIIO Tool Executable
Category=Render Executables
CategoryOrder=0
Default=C:\Program Files\OIIO\bin\oiiotool.exe;/usr/bin/oiiotool
Description=The path to the Open Image IO Tool executable file used for rendering. Enter alternative paths on separate lines.
@@ -1,457 +0,0 @@
# -*- coding: utf-8 -*-
"""Tile Assembler Plugin using Open Image IO tool.

Todo:
    Currently we support only EXRs with their data window set.
"""
import os
import re
import subprocess
import xml.etree.ElementTree

from System.IO import Path

from Deadline.Plugins import DeadlinePlugin
from Deadline.Scripting import (
    FileUtils, RepositoryUtils, SystemUtils)


version_major = 1
version_minor = 0
version_patch = 0
version_string = "{}.{}.{}".format(version_major, version_minor, version_patch)
STRING_TAGS = {
    "format"
}
INT_TAGS = {
    "x", "y", "z",
    "width", "height", "depth",
    "full_x", "full_y", "full_z",
    "full_width", "full_height", "full_depth",
    "tile_width", "tile_height", "tile_depth",
    "nchannels",
    "alpha_channel",
    "z_channel",
    "deep",
    "subimages",
}


XML_CHAR_REF_REGEX_HEX = re.compile(r"&#x?[0-9a-fA-F]+;")

# Regex to parse array attributes
ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$")


def convert_value_by_type_name(value_type, value):
    """Convert value to proper type based on type name.

    In some cases value types have custom python class.
    """

    # Simple types
    if value_type == "string":
        return value

    if value_type == "int":
        return int(value)

    if value_type == "float":
        return float(value)

    # Vectors will probably have more types
    if value_type in ("vec2f", "float2"):
        return [float(item) for item in value.split(",")]

    # Matrix should always have a square element count (3x3, 4x4)
    # - are returned as list of lists
    if value_type == "matrix":
        output = []
        current_index = -1
        parts = value.split(",")
        parts_len = len(parts)
        if parts_len == 1:
            divisor = 1
        elif parts_len == 4:
            divisor = 2
        elif parts_len == 9:
            divisor = 3
        elif parts_len == 16:
            divisor = 4
        else:
            print("Unknown matrix resolution {}. Value: \"{}\"".format(
                parts_len, value
            ))
            for part in parts:
                output.append(float(part))
            return output

        for idx, item in enumerate(parts):
            list_index = idx % divisor
            if list_index > current_index:
                current_index = list_index
                output.append([])
            output[list_index].append(float(item))
        return output

    if value_type == "rational2i":
        parts = value.split("/")
        top = float(parts[0])
        bottom = 1.0
        if len(parts) != 1:
            bottom = float(parts[1])
        return float(top) / float(bottom)

    if value_type == "vector":
        parts = [part.strip() for part in value.split(",")]
        output = []
        for part in parts:
            if part == "-nan":
                output.append(None)
                continue
            try:
                part = float(part)
            except ValueError:
                pass
            output.append(part)
        return output

    if value_type == "timecode":
        return value

    # Array of other types is converted to list
    re_result = ARRAY_TYPE_REGEX.findall(value_type)
    if re_result:
        array_type = re_result[0]
        output = []
        for item in value.split(","):
            output.append(
                convert_value_by_type_name(array_type, item)
            )
        return output

    print((
        "Dev note (missing implementation):"
        " Unknown attrib type \"{}\". Value: {}"
    ).format(value_type, value))
    return value


def parse_oiio_xml_output(xml_string):
    """Parse xml output from OIIO info command."""
    output = {}
    if not xml_string:
        return output

    # Fix values with ampersand (lazy fix)
    # - oiiotool exports invalid xml which ElementTree can't handle
    #   e.g. "&#01;"
    # WARNING: this will affect even valid character entities. If you need
    # those values correctly, this must take care of valid character ranges.
    # See https://github.com/pypeclub/OpenPype/pull/2729
    matches = XML_CHAR_REF_REGEX_HEX.findall(xml_string)
    for match in matches:
        new_value = match.replace("&", "&amp;")
        xml_string = xml_string.replace(match, new_value)

    tree = xml.etree.ElementTree.fromstring(xml_string)
    attribs = {}
    output["attribs"] = attribs
    for child in tree:
        tag_name = child.tag
        if tag_name == "attrib":
            attrib_def = child.attrib
            value = convert_value_by_type_name(
                attrib_def["type"], child.text
            )

            attribs[attrib_def["name"]] = value
            continue

        # Channels are stored as text on each child
        if tag_name == "channelnames":
            value = []
            for channel in child:
                value.append(channel.text)

        # Convert known integer type tags to int
        elif tag_name in INT_TAGS:
            value = int(child.text)

        # Keep value of known string tags
        elif tag_name in STRING_TAGS:
            value = child.text

        # Keep value as text for unknown tags
        # - feel free to add more tags
        else:
            value = child.text
            print((
                "Dev note (missing implementation):"
                " Unknown tag \"{}\". Value \"{}\""
            ).format(tag_name, value))

        output[child.tag] = value

    return output


def info_about_input(oiiotool_path, filepath):
    args = [
        oiiotool_path,
        "--info",
        "-v",
        "-i:infoformat=xml",
        filepath
    ]
    popen = subprocess.Popen(args, stdout=subprocess.PIPE)
    _stdout, _stderr = popen.communicate()
    output = ""
    if _stdout:
        output += _stdout.decode("utf-8", errors="backslashreplace")

    if _stderr:
        output += _stderr.decode("utf-8", errors="backslashreplace")

    output = output.replace("\r\n", "\n")
    xml_started = False
    lines = []
    for line in output.split("\n"):
        if not xml_started:
            if not line.startswith("<"):
                continue
            xml_started = True
        if xml_started:
            lines.append(line)

    if not xml_started:
        raise ValueError(
            "Failed to read input file \"{}\".\nOutput:\n{}".format(
                filepath, output
            )
        )
    xml_text = "\n".join(lines)
    return parse_oiio_xml_output(xml_text)


def GetDeadlinePlugin():  # noqa: N802
    """Helper."""
    return OpenPypeTileAssembler()


def CleanupDeadlinePlugin(deadlinePlugin):  # noqa: N802, N803
    """Helper."""
    deadlinePlugin.cleanup()


class OpenPypeTileAssembler(DeadlinePlugin):
    """Deadline plugin for assembling tiles using OIIO."""

    def __init__(self):
        """Init."""
        super().__init__()
        self.InitializeProcessCallback += self.initialize_process
        self.RenderExecutableCallback += self.render_executable
        self.RenderArgumentCallback += self.render_argument
        self.PreRenderTasksCallback += self.pre_render_tasks
        self.PostRenderTasksCallback += self.post_render_tasks

    def cleanup(self):
        """Cleanup function."""
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback
        del self.PreRenderTasksCallback
        del self.PostRenderTasksCallback

    def initialize_process(self):
        """Initialization."""
        self.LogInfo("Plugin version: {}".format(version_string))
        self.SingleFramesOnly = True
        self.StdoutHandling = True
        self.renderer = self.GetPluginInfoEntryWithDefault(
            "Renderer", "undefined")
        self.AddStdoutHandlerCallback(
            ".*Error.*").HandleCallback += self.handle_stdout_error

    def render_executable(self):
        """Get render executable name.

        Get paths from plugin configuration, find executable and return it.

        Returns:
            (str): Render executable.

        """
        oiiotool_exe_list = self.GetConfigEntry("OIIOTool_RenderExecutable")
        oiiotool_exe = FileUtils.SearchFileList(oiiotool_exe_list)

        if oiiotool_exe == "":
            self.FailRender(("No file found in the semicolon separated "
                             "list \"{}\". The path to the render executable "
                             "can be configured from the Plugin Configuration "
                             "in the Deadline Monitor.").format(
                                 oiiotool_exe_list))

        return oiiotool_exe

    def render_argument(self):
        """Generate command line arguments for render executable.

        Returns:
            (str): arguments to add to render executable.

        """
        # Read tile config file. This file is in a format compatible with
        # Draft Tile Assembler.
        data = {}
        with open(self.config_file, "r") as f:
            for text in f:
                # Parsing key-value pair and removing white-space
                # around the entries
                info = [x.strip() for x in text.split("=", 1)]

                if len(info) > 1:
                    try:
                        data[str(info[0])] = info[1]
                    except Exception as e:
                        # should never be called
                        self.FailRender(
                            "Cannot parse config file: {}".format(e))

        # Get output file. We support only EXRs now.
        output_file = data["ImageFileName"]
        output_file = RepositoryUtils.CheckPathMapping(output_file)
        output_file = self.process_path(output_file)

        tile_info = []
        for tile in range(int(data["TileCount"])):
            tile_info.append({
                "filepath": data["Tile{}".format(tile)],
                "pos_x": int(data["Tile{}X".format(tile)]),
                "pos_y": int(data["Tile{}Y".format(tile)]),
                "height": int(data["Tile{}Height".format(tile)]),
                "width": int(data["Tile{}Width".format(tile)])
            })

        arguments = self.tile_oiio_args(
            int(data["ImageWidth"]), int(data["ImageHeight"]),
            tile_info, output_file)
        self.LogInfo(
            "Using arguments: {}".format(" ".join(arguments)))
        self.tiles = tile_info
        return " ".join(arguments)

    def process_path(self, filepath):
        """Handle slashes in file paths."""
        if SystemUtils.IsRunningOnWindows():
            filepath = filepath.replace("/", "\\")
            if filepath.startswith("\\") and not filepath.startswith("\\\\"):
                filepath = "\\" + filepath
        else:
            filepath = filepath.replace("\\", "/")
        return filepath

    def pre_render_tasks(self):
        """Load config file and do remapping."""
        self.LogInfo("OpenPype Tile Assembler starting...")
        config_file = self.GetPluginInfoEntry("ConfigFile")

        temp_scene_directory = self.CreateTempDirectory(
            "thread" + str(self.GetThreadNumber()))
        temp_scene_filename = Path.GetFileName(config_file)
        self.config_file = Path.Combine(
            temp_scene_directory, temp_scene_filename)

        if SystemUtils.IsRunningOnWindows():
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                config_file, self.config_file, "/", "\\")
        else:
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                config_file, self.config_file, "\\", "/")
            os.chmod(self.config_file, os.stat(self.config_file).st_mode)

    def post_render_tasks(self):
        """Cleanup tiles if required."""
        if self.GetBooleanPluginInfoEntryWithDefault("CleanupTiles", False):
            self.LogInfo("Cleaning up Tiles...")
            for tile in self.tiles:
                try:
                    self.LogInfo("Deleting: {}".format(tile["filepath"]))
                    os.remove(tile["filepath"])
                # By this time we would have errored out
                # if error on missing was enabled
                except KeyError:
                    pass
                except OSError:
                    self.LogInfo("Failed to delete: {}".format(
                        tile["filepath"]))

        self.LogInfo("OpenPype Tile Assembler Job finished.")

    def handle_stdout_error(self):
        """Handle errors in stdout."""
        self.FailRender(self.GetRegexMatch(0))

    def tile_oiio_args(
            self, output_width, output_height, tile_info, output_path):
        """Generate oiio tool arguments for tile assembly.

        Args:
            output_width (int): Width of output image.
            output_height (int): Height of output image.
            tile_info (list): List of tile items, each item must be
                dictionary with `filepath`, `pos_x` and `pos_y` keys
                representing path to file and x, y coordinates on output
                image where top-left point of tile item should start.
            output_path (str): Path to file where should be output stored.

        Returns:
            (list): oiio tools arguments.

        """
        args = []

        # Create new image with output resolution, and with same type and
        # channels as input
        oiiotool_path = self.render_executable()
        first_tile_path = tile_info[0]["filepath"]
        first_tile_info = info_about_input(oiiotool_path, first_tile_path)
        create_arg_template = "--create{} {}x{} {}"

        image_type = ""
        image_format = first_tile_info.get("format")
        if image_format:
            image_type = ":type={}".format(image_format)

        create_arg = create_arg_template.format(
            image_type, output_width,
            output_height, first_tile_info["nchannels"]
        )
        args.append(create_arg)

        for tile in tile_info:
            path = tile["filepath"]
            pos_x = tile["pos_x"]
            tile_height = info_about_input(oiiotool_path, path)["height"]
            if self.renderer == "vray":
                pos_y = tile["pos_y"]
            else:
                pos_y = output_height - tile["pos_y"] - tile_height

            # Add input path and make sure inputs origin is 0, 0
            args.append(path)
            args.append("--origin +0+0")
            # Swap to have input as foreground
            args.append("--swap")
            # Paste foreground to background
            args.append("--paste {x:+d}{y:+d}".format(x=pos_x, y=pos_y))

        args.append("-o")
        args.append(output_path)

        return args
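For reference, the argument list built by `tile_oiio_args` above corresponds
to an oiiotool invocation of roughly the following shape. This is a sketch
with hypothetical tile paths and a 1920x1080, 4-channel output; the real
values come from the tile config file:

    # Sketch of the assembled oiiotool argument list for two tiles.
    args = [
        "--create 1920x1080 4",         # blank canvas at output resolution
        "tile_0.exr", "--origin +0+0",  # load tile, reset data window origin
        "--swap",                       # make the tile the foreground image
        "--paste +0+540",               # composite tile onto the canvas
        "tile_1.exr", "--origin +0+0",
        "--swap",
        "--paste +960+540",
        "-o", "/tmp/assembled.exr",
    ]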
@@ -1,29 +0,0 @@
## OpenPype Deadline repository overlay

This directory is an overlay for the Deadline repository: copy the whole
hierarchy into the Deadline repository and it should work.

Logic:
-----
GlobalJobPreLoad
-----

`GlobalJobPreLoad` retrieves the OpenPype executable path from the
`OpenPype` Deadline Plug-in's settings, then calls the executable to
retrieve the environment variables needed for the Deadline Job.
These environment variables are injected into the rendering process.

Deadline triggers `GlobalJobPreLoad.py` for each Worker as it starts the
Job.

*Note*: It also contains backward-compatible logic to preserve functionality
for old Pype2 and non-OpenPype triggered jobs.

Plugin
------
For each render and publishing job, `GlobalJobPreLoad` checks the `OpenPype`
Deadline Plug-in for the configured location of the OpenPype executable
(needs to be configured in Deadline's `Configure Plugins > OpenPype`).
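A minimal sketch of the pre-load flow described above; the two helpers are
hypothetical placeholders for the real plugin-config lookup and subprocess
call performed by `GlobalJobPreLoad.py`:

    def preload(deadlinePlugin):
        job = deadlinePlugin.GetJob()
        # 1. Read the executable path from the OpenPype Plug-in settings.
        exe = get_openpype_executable(deadlinePlugin)    # hypothetical
        # 2. Call the executable to get the job context's environment.
        env = run_and_parse_environment(exe, job)        # hypothetical
        # 3. Inject the environment into the rendering process.
        for key, value in env.items():
            deadlinePlugin.SetProcessEnvironmentVariable(key, value)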
@@ -1,3 +0,0 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'deadline' version."""
__version__ = "0.2.2"
@@ -1,10 +0,0 @@
name = "deadline"
title = "Deadline"
version = "0.2.2"

client_dir = "ayon_deadline"

ayon_required_addons = {
    "core": ">0.3.2",
}
ayon_compatible_addons = {}
@@ -1,15 +0,0 @@
from typing import Type

from ayon_server.addons import BaseServerAddon

from .settings import DeadlineSettings, DEFAULT_VALUES, DeadlineSiteSettings


class Deadline(BaseServerAddon):
    settings_model: Type[DeadlineSettings] = DeadlineSettings
    site_settings_model: Type[DeadlineSiteSettings] = DeadlineSiteSettings

    async def get_default_settings(self):
        settings_model_cls = self.get_settings_model()
        return settings_model_cls(**DEFAULT_VALUES)
@@ -1,12 +0,0 @@
from .main import (
    DeadlineSettings,
    DEFAULT_VALUES,
)
from .site_settings import DeadlineSiteSettings


__all__ = (
    "DeadlineSettings",
    "DeadlineSiteSettings",
    "DEFAULT_VALUES",
)
@@ -1,85 +0,0 @@
from typing import TYPE_CHECKING
from pydantic import validator

from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
    ensure_unique_names,
)
if TYPE_CHECKING:
    from ayon_server.addons import BaseServerAddon

from .publish_plugins import (
    PublishPluginsModel,
    DEFAULT_DEADLINE_PLUGINS_SETTINGS
)


async def defined_deadline_ws_name_enum_resolver(
    addon: "BaseServerAddon",
    settings_variant: str = "production",
    project_name: str | None = None,
) -> list[str]:
    """Provides list of names of configured Deadline webservice urls."""
    if addon is None:
        return []

    settings = await addon.get_studio_settings(variant=settings_variant)

    ws_server_name = []
    for deadline_url_item in settings.deadline_urls:
        ws_server_name.append(deadline_url_item.name)

    return ws_server_name


class ServerItemSubmodel(BaseSettingsModel):
    """Connection info about configured DL servers."""
    _layout = "compact"
    name: str = SettingsField(title="Name")
    value: str = SettingsField(title="Url")
    require_authentication: bool = SettingsField(
        False, title="Require authentication")
    not_verify_ssl: bool = SettingsField(
        False, title="Don't verify SSL")


class DeadlineSettings(BaseSettingsModel):
    # configured DL servers
    deadline_urls: list[ServerItemSubmodel] = SettingsField(
        default_factory=list,
        title="System Deadline Webservice Info",
        scope=["studio"],
    )

    # name(key) of selected server for project
    deadline_server: str = SettingsField(
        title="Project Deadline server name",
        section="---",
        scope=["project"],
        enum_resolver=defined_deadline_ws_name_enum_resolver
    )

    publish: PublishPluginsModel = SettingsField(
        default_factory=PublishPluginsModel,
        title="Publish Plugins",
    )

    @validator("deadline_urls")
    def validate_unique_names(cls, value):
        ensure_unique_names(value)
        return value


DEFAULT_VALUES = {
    "deadline_urls": [
        {
            "name": "default",
            "value": "http://127.0.0.1:8082",
            "require_authentication": False,
            "not_verify_ssl": False
        }
    ],
    "deadline_server": "default",
    "publish": DEFAULT_DEADLINE_PLUGINS_SETTINGS
}
@ -1,578 +0,0 @@
|
|||
from pydantic import validator
|
||||
|
||||
from ayon_server.settings import (
|
||||
BaseSettingsModel,
|
||||
SettingsField,
|
||||
ensure_unique_names,
|
||||
)
|
||||
|
||||
|
||||
class CollectDeadlinePoolsModel(BaseSettingsModel):
|
||||
"""Settings Deadline default pools."""
|
||||
|
||||
primary_pool: str = SettingsField(title="Primary Pool")
|
||||
|
||||
secondary_pool: str = SettingsField(title="Secondary Pool")
|
||||
|
||||
|
||||
class ValidateExpectedFilesModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(True, title="Enabled")
|
||||
active: bool = SettingsField(True, title="Active")
|
||||
allow_user_override: bool = SettingsField(
|
||||
True, title="Allow user change frame range"
|
||||
)
|
||||
families: list[str] = SettingsField(
|
||||
default_factory=list, title="Trigger on families"
|
||||
)
|
||||
targets: list[str] = SettingsField(
|
||||
default_factory=list, title="Trigger for plugins"
|
||||
)
|
||||
|
||||
|
||||
def tile_assembler_enum():
|
||||
"""Return a list of value/label dicts for the enumerator.
|
||||
|
||||
Returning a list of dicts is used to allow for a custom label to be
|
||||
displayed in the UI.
|
||||
"""
|
||||
return [
|
||||
{
|
||||
"value": "DraftTileAssembler",
|
||||
"label": "Draft Tile Assembler"
|
||||
},
|
||||
{
|
||||
"value": "OpenPypeTileAssembler",
|
||||
"label": "Open Image IO"
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
class ScenePatchesSubmodel(BaseSettingsModel):
|
||||
_layout = "expanded"
|
||||
name: str = SettingsField(title="Patch name")
|
||||
regex: str = SettingsField(title="Patch regex")
|
||||
line: str = SettingsField(title="Patch line")
|
||||
|
||||
|
||||
class MayaSubmitDeadlineModel(BaseSettingsModel):
|
||||
"""Maya deadline submitter settings."""
|
||||
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
use_published: bool = SettingsField(title="Use Published scene")
|
||||
import_reference: bool = SettingsField(
|
||||
title="Use Scene with Imported Reference"
|
||||
)
|
||||
asset_dependencies: bool = SettingsField(title="Use Asset dependencies")
|
||||
priority: int = SettingsField(title="Priority")
|
||||
tile_priority: int = SettingsField(title="Tile Priority")
|
||||
group: str = SettingsField(title="Group")
|
||||
limit: list[str] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Limit Groups"
|
||||
)
|
||||
tile_assembler_plugin: str = SettingsField(
|
||||
title="Tile Assembler Plugin",
|
||||
enum_resolver=tile_assembler_enum,
|
||||
)
|
||||
jobInfo: str = SettingsField(
|
||||
title="Additional JobInfo data",
|
||||
widget="textarea",
|
||||
)
|
||||
pluginInfo: str = SettingsField(
|
||||
title="Additional PluginInfo data",
|
||||
widget="textarea",
|
||||
)
|
||||
|
||||
scene_patches: list[ScenePatchesSubmodel] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Scene patches",
|
||||
)
|
||||
strict_error_checking: bool = SettingsField(
|
||||
title="Disable Strict Error Check profiles"
|
||||
)
|
||||
|
||||
@validator("scene_patches")
|
||||
def validate_unique_names(cls, value):
|
||||
ensure_unique_names(value)
|
||||
return value
|
||||
|
||||
|
||||
class MaxSubmitDeadlineModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(True)
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
use_published: bool = SettingsField(title="Use Published scene")
|
||||
priority: int = SettingsField(title="Priority")
|
||||
chunk_size: int = SettingsField(title="Frame per Task")
|
||||
group: str = SettingsField("", title="Group Name")
|
||||
|
||||
|
||||
class EnvSearchReplaceSubmodel(BaseSettingsModel):
|
||||
_layout = "compact"
|
||||
name: str = SettingsField(title="Name")
|
||||
value: str = SettingsField(title="Value")
|
||||
|
||||
|
||||
class LimitGroupsSubmodel(BaseSettingsModel):
|
||||
_layout = "expanded"
|
||||
name: str = SettingsField(title="Name")
|
||||
value: list[str] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Limit Groups"
|
||||
)
|
||||
|
||||
|
||||
def fusion_deadline_plugin_enum():
|
||||
"""Return a list of value/label dicts for the enumerator.
|
||||
|
||||
Returning a list of dicts is used to allow for a custom label to be
|
||||
displayed in the UI.
|
||||
"""
|
||||
return [
|
||||
{
|
||||
"value": "Fusion",
|
||||
"label": "Fusion"
|
||||
},
|
||||
{
|
||||
"value": "FusionCmd",
|
||||
"label": "FusionCmd"
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
class FusionSubmitDeadlineModel(BaseSettingsModel):
|
||||
enabled: bool = SettingsField(True, title="Enabled")
|
||||
optional: bool = SettingsField(False, title="Optional")
|
||||
active: bool = SettingsField(True, title="Active")
|
||||
priority: int = SettingsField(50, title="Priority")
|
||||
chunk_size: int = SettingsField(10, title="Frame per Task")
|
||||
concurrent_tasks: int = SettingsField(
|
||||
1, title="Number of concurrent tasks"
|
||||
)
|
||||
group: str = SettingsField("", title="Group Name")
|
||||
plugin: str = SettingsField("Fusion",
|
||||
enum_resolver=fusion_deadline_plugin_enum,
|
||||
title="Deadline Plugin")
|
||||
|
||||
|
||||
class NukeSubmitDeadlineModel(BaseSettingsModel):
|
||||
"""Nuke deadline submitter settings."""
|
||||
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
priority: int = SettingsField(title="Priority")
|
||||
chunk_size: int = SettingsField(title="Chunk Size")
|
||||
concurrent_tasks: int = SettingsField(title="Number of concurrent tasks")
|
||||
group: str = SettingsField(title="Group")
|
||||
department: str = SettingsField(title="Department")
|
||||
use_gpu: bool = SettingsField(title="Use GPU")
|
||||
workfile_dependency: bool = SettingsField(title="Workfile Dependency")
|
||||
use_published_workfile: bool = SettingsField(
|
||||
title="Use Published Workfile"
|
||||
)
|
||||
|
||||
env_allowed_keys: list[str] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Allowed environment keys"
|
||||
)
|
||||
|
||||
env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Search & replace in environment values",
|
||||
)
|
||||
|
||||
limit_groups: list[LimitGroupsSubmodel] = SettingsField(
|
||||
default_factory=list,
|
||||
title="Limit Groups",
|
||||
)
|
||||
|
||||
@validator(
|
||||
"limit_groups",
|
||||
"env_search_replace_values")
|
||||
def validate_unique_names(cls, value):
|
||||
ensure_unique_names(value)
|
||||
return value
|
||||
|
||||
|
||||
class HarmonySubmitDeadlineModel(BaseSettingsModel):
|
||||
"""Harmony deadline submitter settings."""
|
||||
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
use_published: bool = SettingsField(title="Use Published scene")
|
||||
priority: int = SettingsField(title="Priority")
|
||||
chunk_size: int = SettingsField(title="Chunk Size")
|
||||
group: str = SettingsField(title="Group")
|
||||
department: str = SettingsField(title="Department")
|
||||
|
||||
|
||||
class HoudiniSubmitDeadlineModel(BaseSettingsModel):
|
||||
"""Houdini deadline render submitter settings."""
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
|
||||
priority: int = SettingsField(title="Priority")
|
||||
chunk_size: int = SettingsField(title="Chunk Size")
|
||||
group: str = SettingsField(title="Group")
|
||||
|
||||
export_priority: int = SettingsField(title="Export Priority")
|
||||
export_chunk_size: int = SettingsField(title="Export Chunk Size")
|
||||
export_group: str = SettingsField(title="Export Group")
|
||||
|
||||
|
||||
class HoudiniCacheSubmitDeadlineModel(BaseSettingsModel):
|
||||
"""Houdini deadline cache submitter settings."""
|
||||
enabled: bool = SettingsField(title="Enabled")
|
||||
optional: bool = SettingsField(title="Optional")
|
||||
active: bool = SettingsField(title="Active")
|
||||
|
||||
priority: int = SettingsField(title="Priority")
|
||||
chunk_size: int = SettingsField(title="Chunk Size")
|
||||
group: str = SettingsField(title="Group")
|
||||
|
||||
|
||||
class AfterEffectsSubmitDeadlineModel(BaseSettingsModel):
    """After Effects deadline submitter settings."""

    enabled: bool = SettingsField(title="Enabled")
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")
    use_published: bool = SettingsField(title="Use Published scene")
    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Chunk Size")
    group: str = SettingsField(title="Group")
    department: str = SettingsField(title="Department")
    multiprocess: bool = SettingsField(title="Multiprocess")
class CelactionSubmitDeadlineModel(BaseSettingsModel):
    enabled: bool = SettingsField(True, title="Enabled")
    deadline_department: str = SettingsField("", title="Deadline department")
    deadline_priority: int = SettingsField(50, title="Deadline priority")
    deadline_pool: str = SettingsField("", title="Deadline pool")
    deadline_pool_secondary: str = SettingsField(
        "", title="Deadline pool (secondary)"
    )
    deadline_group: str = SettingsField("", title="Deadline Group")
    deadline_chunk_size: int = SettingsField(10, title="Deadline Chunk size")
    deadline_job_delay: str = SettingsField(
        "", title="Delay job (timecode dd:hh:mm:ss)"
    )
class BlenderSubmitDeadlineModel(BaseSettingsModel):
    enabled: bool = SettingsField(True)
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")
    use_published: bool = SettingsField(title="Use Published scene")
    asset_dependencies: bool = SettingsField(title="Use Asset dependencies")
    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Frame per Task")
    group: str = SettingsField("", title="Group Name")
    job_delay: str = SettingsField(
        "", title="Delay job (timecode dd:hh:mm:ss)"
    )


class AOVFilterSubmodel(BaseSettingsModel):
    _layout = "expanded"
    name: str = SettingsField(title="Host")
    value: list[str] = SettingsField(
        default_factory=list,
        title="AOV regex"
    )
class ProcessCacheJobFarmModel(BaseSettingsModel):
    """Process submitted cache job on farm."""

    enabled: bool = SettingsField(title="Enabled")
    deadline_department: str = SettingsField(title="Department")
    deadline_pool: str = SettingsField(title="Pool")
    deadline_group: str = SettingsField(title="Group")
    deadline_chunk_size: int = SettingsField(title="Chunk Size")
    deadline_priority: int = SettingsField(title="Priority")
class ProcessSubmittedJobOnFarmModel(BaseSettingsModel):
    """Process submitted job on farm."""

    enabled: bool = SettingsField(title="Enabled")
    deadline_department: str = SettingsField(title="Department")
    deadline_pool: str = SettingsField(title="Pool")
    deadline_group: str = SettingsField(title="Group")
    deadline_chunk_size: int = SettingsField(title="Chunk Size")
    deadline_priority: int = SettingsField(title="Priority")
    publishing_script: str = SettingsField(title="Publishing script path")
    skip_integration_repre_list: list[str] = SettingsField(
        default_factory=list,
        title="Skip integration of representation with ext"
    )
    families_transfer: list[str] = SettingsField(
        default_factory=list,
        title=(
            "List of family names to transfer\n"
            "to generated instances (AOVs for example)."
        )
    )
    aov_filter: list[AOVFilterSubmodel] = SettingsField(
        default_factory=list,
        title="Reviewable products filter",
    )

    @validator("aov_filter")
    def validate_unique_names(cls, value):
        ensure_unique_names(value)
        return value
class PublishPluginsModel(BaseSettingsModel):
    CollectDeadlinePools: CollectDeadlinePoolsModel = SettingsField(
        default_factory=CollectDeadlinePoolsModel,
        title="Default Pools")
    ValidateExpectedFiles: ValidateExpectedFilesModel = SettingsField(
        default_factory=ValidateExpectedFilesModel,
        title="Validate Expected Files"
    )
    AfterEffectsSubmitDeadline: AfterEffectsSubmitDeadlineModel = (
        SettingsField(
            default_factory=AfterEffectsSubmitDeadlineModel,
            title="After Effects to deadline",
            section="Hosts"
        )
    )
    BlenderSubmitDeadline: BlenderSubmitDeadlineModel = SettingsField(
        default_factory=BlenderSubmitDeadlineModel,
        title="Blender Submit Deadline")
    CelactionSubmitDeadline: CelactionSubmitDeadlineModel = SettingsField(
        default_factory=CelactionSubmitDeadlineModel,
        title="Celaction Submit Deadline")
    FusionSubmitDeadline: FusionSubmitDeadlineModel = SettingsField(
        default_factory=FusionSubmitDeadlineModel,
        title="Fusion submit to Deadline")
    HarmonySubmitDeadline: HarmonySubmitDeadlineModel = SettingsField(
        default_factory=HarmonySubmitDeadlineModel,
        title="Harmony Submit to deadline")
    HoudiniCacheSubmitDeadline: HoudiniCacheSubmitDeadlineModel = SettingsField(
        default_factory=HoudiniCacheSubmitDeadlineModel,
        title="Houdini Submit cache to deadline")
    HoudiniSubmitDeadline: HoudiniSubmitDeadlineModel = SettingsField(
        default_factory=HoudiniSubmitDeadlineModel,
        title="Houdini Submit render to deadline")
    MaxSubmitDeadline: MaxSubmitDeadlineModel = SettingsField(
        default_factory=MaxSubmitDeadlineModel,
        title="Max Submit to deadline")
    MayaSubmitDeadline: MayaSubmitDeadlineModel = SettingsField(
        default_factory=MayaSubmitDeadlineModel,
        title="Maya Submit to deadline")
    NukeSubmitDeadline: NukeSubmitDeadlineModel = SettingsField(
        default_factory=NukeSubmitDeadlineModel,
        title="Nuke Submit to deadline")
    ProcessSubmittedCacheJobOnFarm: ProcessCacheJobFarmModel = SettingsField(
        default_factory=ProcessCacheJobFarmModel,
        title="Process submitted cache Job on farm",
        section="Publish Jobs")
    ProcessSubmittedJobOnFarm: ProcessSubmittedJobOnFarmModel = SettingsField(
        default_factory=ProcessSubmittedJobOnFarmModel,
        title="Process submitted job on farm")
DEFAULT_DEADLINE_PLUGINS_SETTINGS = {
    "CollectDeadlinePools": {
        "primary_pool": "",
        "secondary_pool": ""
    },
    "ValidateExpectedFiles": {
        "enabled": True,
        "active": True,
        "allow_user_override": True,
        "families": [
            "render"
        ],
        "targets": [
            "deadline"
        ]
    },
    "AfterEffectsSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "use_published": True,
        "priority": 50,
        "chunk_size": 10000,
        "group": "",
        "department": "",
        "multiprocess": True
    },
    "BlenderSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "use_published": True,
        "asset_dependencies": True,
        "priority": 50,
        "chunk_size": 10,
        "group": "none",
        "job_delay": "00:00:00:00"
    },
    "CelactionSubmitDeadline": {
        "enabled": True,
        "deadline_department": "",
        "deadline_priority": 50,
        "deadline_pool": "",
        "deadline_pool_secondary": "",
        "deadline_group": "",
        "deadline_chunk_size": 10,
        "deadline_job_delay": "00:00:00:00"
    },
    "FusionSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "priority": 50,
        "chunk_size": 10,
        "concurrent_tasks": 1,
        "group": ""
    },
    "HarmonySubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "use_published": True,
        "priority": 50,
        "chunk_size": 10000,
        "group": "",
        "department": ""
    },
    "HoudiniCacheSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "priority": 50,
        "chunk_size": 999999,
        "group": ""
    },
    "HoudiniSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "priority": 50,
        "chunk_size": 1,
        "group": "",
        "export_priority": 50,
        "export_chunk_size": 10,
        "export_group": ""
    },
    "MaxSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "use_published": True,
        "priority": 50,
        "chunk_size": 10,
        "group": "none"
    },
    "MayaSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "tile_assembler_plugin": "DraftTileAssembler",
        "use_published": True,
        "import_reference": False,
        "asset_dependencies": True,
        "strict_error_checking": True,
        "priority": 50,
        "tile_priority": 50,
        "group": "none",
        "limit": [],
        # this used to be empty dict
        "jobInfo": "",
        # this used to be empty dict
        "pluginInfo": "",
        "scene_patches": []
    },
    "NukeSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "priority": 50,
        "chunk_size": 10,
        "concurrent_tasks": 1,
        "group": "",
        "department": "",
        "use_gpu": True,
        "workfile_dependency": True,
        "use_published_workfile": True,
        "env_allowed_keys": [],
        "env_search_replace_values": [],
        "limit_groups": []
    },
    "ProcessSubmittedCacheJobOnFarm": {
        "enabled": True,
        "deadline_department": "",
        "deadline_pool": "",
        "deadline_group": "",
        "deadline_chunk_size": 1,
        "deadline_priority": 50
    },
    "ProcessSubmittedJobOnFarm": {
        "enabled": True,
        "deadline_department": "",
        "deadline_pool": "",
        "deadline_group": "",
        "deadline_chunk_size": 1,
        "deadline_priority": 50,
        "publishing_script": "",
        "skip_integration_repre_list": [],
        "families_transfer": ["render3d", "render2d", "ftrack", "slate"],
        "aov_filter": [
            {
                "name": "maya",
                "value": [
                    ".*([Bb]eauty).*"
                ]
            },
            {
                "name": "blender",
                "value": [
                    ".*([Bb]eauty).*"
                ]
            },
            {
                "name": "aftereffects",
                "value": [
                    ".*"
                ]
            },
            {
                "name": "celaction",
                "value": [
                    ".*"
                ]
            },
            {
                "name": "harmony",
                "value": [
                    ".*"
                ]
            },
            {
                "name": "max",
                "value": [
                    ".*"
                ]
            },
            {
                "name": "fusion",
                "value": [
                    ".*"
                ]
            }
        ]
    }
}
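Since `BaseSettingsModel` follows pydantic conventions, round-tripping the defaults through the model is a cheap guard against the two blocks above drifting apart — a minimal sketch, assuming the model accepts keyword construction the way pydantic models do:

    def _check_defaults():
        # Wrong value types or shapes in the defaults would raise a
        # ValidationError here; a clean construction means every default
        # block matches its declared submodel.
        model = PublishPluginsModel(**DEFAULT_DEADLINE_PLUGINS_SETTINGS)
        assert set(DEFAULT_DEADLINE_PLUGINS_SETTINGS) <= set(model.dict())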
@ -1,28 +0,0 @@
from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
)

from .main import defined_deadline_ws_name_enum_resolver


class CredentialPerServerModel(BaseSettingsModel):
    """Provide credentials for configured DL servers"""
    _layout = "expanded"
    server_name: str = SettingsField(
        "",
        title="DL server name",
        enum_resolver=defined_deadline_ws_name_enum_resolver
    )
    username: str = SettingsField("", title="Username")
    password: str = SettingsField("", title="Password")


class DeadlineSiteSettings(BaseSettingsModel):
    local_settings: list[CredentialPerServerModel] = SettingsField(
        default_factory=list,
        title="Local setting",
        description=(
            "Please provide credentials for configured Deadline servers"
        ),
    )
@ -1,13 +0,0 @@
from .version import __version__
from .addon import (
    MaxAddon,
    MAX_HOST_DIR,
)


__all__ = (
    "__version__",

    "MaxAddon",
    "MAX_HOST_DIR",
)
@ -1,28 +0,0 @@
# -*- coding: utf-8 -*-
import os
from ayon_core.addon import AYONAddon, IHostAddon

from .version import __version__

MAX_HOST_DIR = os.path.dirname(os.path.abspath(__file__))


class MaxAddon(AYONAddon, IHostAddon):
    name = "max"
    version = __version__
    host_name = "max"

    def add_implementation_envs(self, env, _app):
        # Remove auto screen scale factor for Qt
        # - let 3dsmax decide its value
        env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)

    def get_workfile_extensions(self):
        return [".max"]

    def get_launch_hook_paths(self, app):
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(MAX_HOST_DIR, "hooks")
        ]
@ -1,20 +0,0 @@
# -*- coding: utf-8 -*-
"""Public API for 3dsmax"""

from .pipeline import (
    MaxHost,
)

from .lib import (
    maintained_selection,
    lsattr,
    get_all_children
)

__all__ = [
    "MaxHost",
    "maintained_selection",
    "lsattr",
    "get_all_children"
]
@ -1,42 +0,0 @@
from pymxs import runtime as rt

import pyblish.api

from ayon_core.pipeline.publish import get_errored_instances_from_context


class SelectInvalidAction(pyblish.api.Action):
    """Select invalid objects in 3dsMax when a publish plug-in failed."""
    label = "Select Invalid"
    on = "failed"
    icon = "search"

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(context,
                                                               plugin=plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes...")
        invalid = list()
        for instance in errored_instances:
            invalid_nodes = plugin.get_invalid(instance)
            if invalid_nodes:
                if isinstance(invalid_nodes, (list, tuple)):
                    invalid.extend(invalid_nodes)
                else:
                    self.log.warning(
                        "Failed plug-in doesn't have any selectable objects."
                    )

        if not invalid:
            self.log.info("No invalid nodes found.")
            return
        # Some plug-ins return (node, message) pairs instead of plain nodes;
        # unpack the nodes when that is the case.
        invalid_names = [obj.name for obj in invalid if not isinstance(obj, tuple)]
        if not invalid_names:
            invalid_names = [obj.name for obj, _ in invalid]
            invalid = [obj for obj, _ in invalid]
        self.log.info(
            "Selecting invalid objects: %s", ", ".join(invalid_names)
        )

        rt.Select(invalid)
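The action relies on each failing plug-in exposing a `get_invalid` classmethod that returns selectable nodes. A minimal sketch of a matching validator (the class name, family and the whitespace check are illustrative assumptions, not part of the removed code):

    import pyblish.api


    class ValidateNodeNamesExample(pyblish.api.InstancePlugin):
        """Hypothetical validator pairing with SelectInvalidAction."""
        order = pyblish.api.ValidatorOrder
        families = ["model"]  # illustrative family
        actions = [SelectInvalidAction]

        @classmethod
        def get_invalid(cls, instance):
            # Return the nodes failing the check; the action selects them.
            return [node for node in instance if " " in node.name]

        def process(self, instance):
            invalid = self.get_invalid(instance)
            if invalid:
                raise RuntimeError(
                    "Node names contain spaces: {}".format(invalid))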
@ -1,50 +0,0 @@
import attr
from pymxs import runtime as rt


@attr.s
class LayerMetadata(object):
    """Data class for Render Layer metadata."""
    frameStart = attr.ib()
    frameEnd = attr.ib()


@attr.s
class RenderProduct(object):
    """Colorspace data carried per render product when submitting
    the publish job.
    """
    colorspace = attr.ib()  # colorspace
    view = attr.ib()
    productName = attr.ib(default=None)


class ARenderProduct(object):

    def __init__(self):
        """Constructor."""
        # Initialize
        self.layer_data = self._get_layer_data()
        self.layer_data.products = self.get_colorspace_data()

    def _get_layer_data(self):
        return LayerMetadata(
            frameStart=int(rt.rendStart),
            frameEnd=int(rt.rendEnd),
        )

    def get_colorspace_data(self):
        """To be implemented by renderer class.
        This should return a list of RenderProducts.
        Returns:
            list: List of RenderProduct
        """
        colorspace_data = [
            RenderProduct(
                colorspace="sRGB",
                view="ACES 1.0",
                productName=""
            )
        ]
        return colorspace_data
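`ARenderProduct.get_colorspace_data` is meant to be overridden per renderer; a sketch of what a subclass could look like (the renderer name and colorspace values are illustrative assumptions):

    class RedshiftRenderProductExample(ARenderProduct):
        """Hypothetical per-renderer override of the colorspace data."""

        def get_colorspace_data(self):
            # One RenderProduct per AOV the renderer is expected to write.
            return [
                RenderProduct(
                    colorspace="ACEScg",
                    view="ACES 1.0 SDR-video",
                    productName="beauty",
                )
            ]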
@ -1,589 +0,0 @@
# -*- coding: utf-8 -*-
"""Library of functions useful for 3dsmax pipeline."""
import contextlib
import logging
import json
from typing import Any, Dict, Union

import six

from ayon_core.pipeline import (
    get_current_project_name,
    colorspace
)
from ayon_core.settings import get_project_settings
from ayon_core.pipeline.context_tools import (
    get_current_task_entity
)
from ayon_core.style import load_stylesheet
from pymxs import runtime as rt


JSON_PREFIX = "JSON::"
log = logging.getLogger("ayon_max")


def get_main_window():
    """Acquire Max's main window"""
    from qtpy import QtWidgets
    top_widgets = QtWidgets.QApplication.topLevelWidgets()
    name = "QmaxApplicationWindow"
    for widget in top_widgets:
        if (
            widget.inherits("QMainWindow")
            and widget.metaObject().className() == name
        ):
            return widget
    raise RuntimeError('Could not find 3dsMax main window.')


def imprint(node_name: str, data: dict) -> bool:
    node = rt.GetNodeByName(node_name)
    if not node:
        return False

    for k, v in data.items():
        if isinstance(v, (dict, list)):
            rt.SetUserProp(node, k, f"{JSON_PREFIX}{json.dumps(v)}")
        else:
            rt.SetUserProp(node, k, v)

    return True


def lsattr(
        attr: str,
        value: Union[str, None] = None,
        root: Union[str, None] = None) -> list:
    """List nodes having attribute with specified value.

    Args:
        attr (str): Attribute name to match.
        value (str, Optional): Value to match. If omitted, all nodes
            with the specified attribute are returned no matter the value.
        root (str, Optional): Root node name. If omitted, scene root is used.

    Returns:
        list of nodes.
    """
    root = rt.RootNode if root is None else rt.GetNodeByName(root)

    def output_node(node, nodes):
        nodes.append(node)
        for child in node.Children:
            output_node(child, nodes)

    nodes = []
    output_node(root, nodes)
    return [
        n for n in nodes
        if rt.GetUserProp(n, attr) == value
    ] if value else [
        n for n in nodes
        if rt.GetUserProp(n, attr)
    ]


def read(container) -> dict:
    data = {}
    props = rt.GetUserPropBuffer(container)
    # this shouldn't happen but let's guard against it anyway
    if not props:
        return data

    for line in props.split("\r\n"):
        try:
            key, value = line.split("=")
        except ValueError:
            # if the line cannot be split we can't really parse it
            continue

        value = value.strip()
        if isinstance(value.strip(), six.string_types) and \
                value.startswith(JSON_PREFIX):
            with contextlib.suppress(json.JSONDecodeError):
                value = json.loads(value[len(JSON_PREFIX):])

        # default value behavior
        # convert maxscript boolean values
        if value == "true":
            value = True
        elif value == "false":
            value = False

        data[key.strip()] = value

    data["instance_node"] = container.Name

    return data

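# Editor's usage sketch (not part of the removed file): imprint/read form a
# round-trip -- dict and list values are serialized behind JSON_PREFIX and
# decoded again on read. The node name below is an illustrative assumption.
#
#   imprint("AYON_container", {"members": ["Box001", "Box002"]})
#   node = rt.GetNodeByName("AYON_container")
#   assert read(node)["members"] == ["Box001", "Box002"]
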
@contextlib.contextmanager
def maintained_selection():
    previous_selection = rt.GetCurrentSelection()
    try:
        yield
    finally:
        if previous_selection:
            rt.Select(previous_selection)
        else:
            rt.Select()


def get_all_children(parent, node_type=None):
    """Handy function to get all the children of a given node

    Args:
        parent (3dsmax Node1): Node to get all children of.
        node_type (None, runtime.class): give class to check for
            e.g. rt.FFDBox/rt.GeometryClass etc.

    Returns:
        list: list of all children of the parent node
    """
    def list_children(node):
        children = []
        for c in node.Children:
            children.append(c)
            children = children + list_children(c)
        return children
    child_list = list_children(parent)

    return ([x for x in child_list if rt.SuperClassOf(x) == node_type]
            if node_type else child_list)


def get_current_renderer():
    """Get the current production renderer for Max.

    Returns:
        "{Current Renderer}:{Current Renderer}"
        e.g. "Redshift_Renderer:Redshift_Renderer"
    """
    return rt.renderers.production


def get_default_render_folder(project_setting=None):
    return (project_setting["max"]
                           ["RenderSettings"]
                           ["default_render_image_folder"])


def set_render_frame_range(start_frame, end_frame):
    """
    Note:
        Frame range can be specified in different types. Possible values are:
        * `1` - Single frame.
        * `2` - Active time segment ( animationRange ).
        * `3` - User specified Range.
        * `4` - User specified Frame pickup string (for example `1,3,5-12`).

    Todo:
        Current type is hard-coded, there should be a custom setting for this.
    """
    rt.rendTimeType = 3
    if start_frame is not None and end_frame is not None:
        rt.rendStart = int(start_frame)
        rt.rendEnd = int(end_frame)


def get_multipass_setting(project_setting=None):
    return (project_setting["max"]
                           ["RenderSettings"]
                           ["multipass"])


def set_scene_resolution(width: int, height: int):
    """Set the render resolution

    Args:
        width(int): value of the width
        height(int): value of the height

    Returns:
        None
    """
    # Make sure the Render Setup dialog is closed before updating the
    # resolution; changing its settings should be done while it is closed.
    if rt.renderSceneDialog.isOpen():
        rt.renderSceneDialog.close()

    rt.renderWidth = width
    rt.renderHeight = height


def reset_scene_resolution():
    """Apply the scene resolution from the project definition

    Scene resolution can be overwritten by a folder if the folder.attrib
    contains any information regarding scene resolution.
    """
    task_attributes = get_current_task_entity(fields={"attrib"})["attrib"]
    width = int(task_attributes["resolutionWidth"])
    height = int(task_attributes["resolutionHeight"])

    set_scene_resolution(width, height)


def get_frame_range(task_entity=None) -> Union[Dict[str, Any], None]:
    """Get the current task frame range and handles

    Args:
        task_entity (dict): Task Entity.

    Returns:
        dict: with frame start, frame end, handle start, handle end.
    """
    # Set frame start/end
    if task_entity is None:
        task_entity = get_current_task_entity(fields={"attrib"})
    task_attributes = task_entity["attrib"]
    frame_start = int(task_attributes["frameStart"])
    frame_end = int(task_attributes["frameEnd"])
    handle_start = int(task_attributes["handleStart"])
    handle_end = int(task_attributes["handleEnd"])
    frame_start_handle = frame_start - handle_start
    frame_end_handle = frame_end + handle_end

    return {
        "frameStart": frame_start,
        "frameEnd": frame_end,
        "handleStart": handle_start,
        "handleEnd": handle_end,
        "frameStartHandle": frame_start_handle,
        "frameEndHandle": frame_end_handle,
    }


def reset_frame_range(fps: bool = True):
    """Set frame range to current folder.

    This is part of 3dsmax documentation:

    animationRange: A System Global variable which lets you get and
        set an Interval value that defines the start and end frames
        of the Active Time Segment.
    frameRate: A System Global variable which lets you get
        and set an Integer value that defines the current
        scene frame rate in frames-per-second.
    """
    if fps:
        rt.frameRate = float(get_fps_for_current_context())

    frame_range = get_frame_range()

    set_timeline(
        frame_range["frameStartHandle"], frame_range["frameEndHandle"])
    set_render_frame_range(
        frame_range["frameStartHandle"], frame_range["frameEndHandle"])


def get_fps_for_current_context():
    """Get fps that should be set for current context.

    Todos:
        - Skip project value.
        - Merge logic with 'get_frame_range' and 'reset_scene_resolution' ->
          all the values in the functions can be collected at one place as
          they have same requirements.

    Returns:
        Union[int, float]: FPS value.
    """
    task_entity = get_current_task_entity(fields={"attrib"})
    return task_entity["attrib"]["fps"]

def reset_unit_scale():
    """Apply the unit scale setting to 3dsMax"""
    project_name = get_current_project_name()
    settings = get_project_settings(project_name).get("max")
    scene_scale = settings.get("unit_scale_settings",
                               {}).get("scene_unit_scale")
    if scene_scale:
        rt.units.DisplayType = rt.Name("Metric")
        rt.units.MetricType = rt.Name(scene_scale)
    else:
        rt.units.DisplayType = rt.Name("Generic")


def convert_unit_scale():
    """Convert system unit scale in 3dsMax for fbx export.

    Returns:
        str: unit scale
    """
    unit_scale_dict = {
        "millimeters": "mm",
        "centimeters": "cm",
        "meters": "m",
        "kilometers": "km"
    }
    current_unit_scale = rt.Execute("units.MetricType as string")
    return unit_scale_dict[current_unit_scale]


def set_context_setting():
    """Apply the project settings from the project definition

    Settings can be overwritten by a folder if the folder.attrib contains
    any information regarding those settings.

    Examples of settings:
        frame range
        resolution

    Returns:
        None
    """
    reset_scene_resolution()
    reset_frame_range()
    reset_colorspace()
    reset_unit_scale()


def get_max_version():
    """Get the 3dsMax version year, e.g. for Deadline submission.

    Returns:
        int: Version year. ``rt.MaxVersion()`` returns e.g.
        ``#(25000, 62, 0, 25, 0, 0, 997, 2023, "")`` where index 7
        holds the version year.
    """
    max_info = rt.MaxVersion()
    return max_info[7]


def is_headless():
    """Check if 3dsMax runs in batch mode.

    If it returns True, it runs in 3dsbatch.exe
    If it returns False, it runs in 3dsmax.exe
    """
    return rt.maxops.isInNonInteractiveMode()


def set_timeline(frameStart, frameEnd):
    """Set frame range for timeline editor in Max"""
    rt.animationRange = rt.interval(int(frameStart), int(frameEnd))
    return rt.animationRange


def reset_colorspace():
    """OCIO Configuration

    Supported in 3dsMax 2024+
    """
    if int(get_max_version()) < 2024:
        return

    max_config_data = colorspace.get_current_context_imageio_config_preset()
    if max_config_data:
        ocio_config_path = max_config_data["path"]
        colorspace_mgr = rt.ColorPipelineMgr
        colorspace_mgr.Mode = rt.Name("OCIO_Custom")
        colorspace_mgr.OCIOConfigPath = ocio_config_path


def check_colorspace():
    parent = get_main_window()
    if parent is None:
        log.info("Skipping outdated pop-up "
                 "because Max main window can't be found.")
        return
    if int(get_max_version()) >= 2024:
        color_mgr = rt.ColorPipelineMgr
        max_config_data = colorspace.get_current_context_imageio_config_preset()
        if max_config_data and color_mgr.Mode != rt.Name("OCIO_Custom"):
            if not is_headless():
                from ayon_core.tools.utils import SimplePopup
                dialog = SimplePopup(parent=parent)
                dialog.setWindowTitle("Warning: Wrong OCIO Mode")
                dialog.set_message("This scene has wrong OCIO "
                                   "Mode setting.")
                dialog.set_button_text("Fix")
                dialog.setStyleSheet(load_stylesheet())
                dialog.on_clicked.connect(reset_colorspace)
                dialog.show()


def unique_namespace(namespace, format="%02d",
                     prefix="", suffix="", con_suffix="CON"):
    """Return unique namespace

    Arguments:
        namespace (str): Name of namespace to consider
        format (str, optional): Formatting of the given iteration number
        suffix (str, optional): Only consider namespaces with this suffix.
        con_suffix: max only, for finding the name of the master container

    >>> unique_namespace("bar")
    # bar01
    >>> unique_namespace(":hello")
    # :hello01
    >>> unique_namespace("bar:", suffix="_NS")
    # bar01_NS:

    """

    def current_namespace():
        current = namespace
        # When inside a namespace Max adds no trailing :
        if not current.endswith(":"):
            current += ":"
        return current

    # Always check against the absolute namespace root
    # There's no clash with :x if we're defining namespace :a:x
    ROOT = ":" if namespace.startswith(":") else current_namespace()

    # Strip trailing `:` tokens since we might want to add a suffix
    start = ":" if namespace.startswith(":") else ""
    end = ":" if namespace.endswith(":") else ""
    namespace = namespace.strip(":")
    if ":" in namespace:
        # Split off any nesting that we don't uniqify anyway.
        parents, namespace = namespace.rsplit(":", 1)
        start += parents + ":"
        ROOT += start

    # Increment the iteration number until no master container with the
    # candidate name exists in the scene.
    iteration = 1
    while True:
        nr_namespace = namespace + format % iteration
        unique = prefix + nr_namespace + suffix
        container_name = f"{unique}:{namespace}{con_suffix}"
        if not rt.getNodeByName(container_name):
            return start + unique + end
        iteration += 1


def get_namespace(container_name):
    """Get the namespace and name of the sub-container

    Args:
        container_name (str): the name of master container

    Raises:
        RuntimeError: when there is no master container found

    Returns:
        namespace (str): namespace of the sub-container
        name (str): name of the sub-container
    """
    node = rt.getNodeByName(container_name)
    if not node:
        raise RuntimeError("Master Container Not Found..")
    name = rt.getUserProp(node, "name")
    namespace = rt.getUserProp(node, "namespace")
    return namespace, name


def object_transform_set(container_children):
    """Store the transforms of previously loaded object(s).

    Args:
        container_children(list): A list of nodes

    Returns:
        transform_set (dict): A dict with all transform data of
            the previously loaded object(s)
    """
    transform_set = {}
    for node in container_children:
        name = f"{node}.transform"
        transform_set[name] = node.pos
        name = f"{node}.scale"
        transform_set[name] = node.scale
    return transform_set


def get_plugins() -> list:
    """Get all loaded plugins in 3dsMax

    Returns:
        plugin_info_list: a list of loaded plugins
    """
    manager = rt.PluginManager
    count = manager.pluginDllCount
    plugin_info_list = []
    for p in range(1, count + 1):
        plugin_info = manager.pluginDllName(p)
        plugin_info_list.append(plugin_info)

    return plugin_info_list


def update_modifier_node_names(event, node):
    """Update the name of the nodes after renaming

    Args:
        event (pymxs.MXSWrapperBase): Event Name (
            Mandatory argument for rt.NodeEventCallback)
        node (list): Event Number (
            Mandatory argument for rt.NodeEventCallback)

    """
    containers = [
        obj
        for obj in rt.Objects
        if (
            rt.ClassOf(obj) == rt.Container
            and rt.getUserProp(obj, "id") == "pyblish.avalon.instance"
            and rt.getUserProp(obj, "productType") not in {
                "workfile", "tyflow"
            }
        )
    ]
    if not containers:
        return
    for container in containers:
        ayon_data = container.modifiers[0].openPypeData
        updated_node_names = [str(node.node) for node
                              in ayon_data.all_handles]
        rt.setProperty(ayon_data, "sel_list", updated_node_names)


@contextlib.contextmanager
def render_resolution(width, height):
    """Set render resolution option during context

    Args:
        width (int): render width
        height (int): render height
    """
    current_renderWidth = rt.renderWidth
    current_renderHeight = rt.renderHeight
    try:
        rt.renderWidth = width
        rt.renderHeight = height
        yield
    finally:
        rt.renderWidth = current_renderWidth
        rt.renderHeight = current_renderHeight


@contextlib.contextmanager
def suspended_refresh():
    """Suspended refresh for scene and modify panel redraw."""
    if is_headless():
        yield
        return
    rt.disableSceneRedraw()
    rt.suspendEditing()
    try:
        yield

    finally:
        rt.enableSceneRedraw()
        rt.resumeEditing()
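The two context managers at the end of the file compose naturally when a render should run at a temporary size without redraw overhead — a short usage sketch (the resolution override and the render call are illustrative):

    with suspended_refresh(), render_resolution(1920, 1080):
        rt.render()  # any operation meant to run at the override size
    # Both managers restore the previous state on exit, even on error.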
@ -1,275 +0,0 @@
# Render Element Example : For scanline render, VRay
# https://help.autodesk.com/view/MAXDEV/2022/ENU/?guid=GUID-E8F75D47-B998-4800-A3A5-610E22913CFC
# arnold
# https://help.autodesk.com/view/ARNOL/ENU/?guid=arnold_for_3ds_max_ax_maxscript_commands_ax_renderview_commands_html
import os

from pymxs import runtime as rt

from ayon_max.api.lib import get_current_renderer
from ayon_core.pipeline import get_current_project_name
from ayon_core.settings import get_project_settings


class RenderProducts(object):

    def __init__(self, project_settings=None):
        self._project_settings = project_settings
        if not self._project_settings:
            self._project_settings = get_project_settings(
                get_current_project_name()
            )

    def get_beauty(self, container):
        render_dir = os.path.dirname(rt.rendOutputFilename)

        output_file = os.path.join(render_dir, container)

        setting = self._project_settings
        img_fmt = setting["max"]["RenderSettings"]["image_format"]  # noqa

        start_frame = int(rt.rendStart)
        end_frame = int(rt.rendEnd) + 1

        return {
            "beauty": self.get_expected_beauty(
                output_file, start_frame, end_frame, img_fmt
            )
        }

    def get_multiple_beauty(self, outputs, cameras):
        beauty_output_frames = dict()
        for output, camera in zip(outputs, cameras):
            filename, ext = os.path.splitext(output)
            filename = filename.replace(".", "")
            ext = ext.replace(".", "")
            start_frame = int(rt.rendStart)
            end_frame = int(rt.rendEnd) + 1
            new_beauty = self.get_expected_beauty(
                filename, start_frame, end_frame, ext
            )
            beauty_output = ({
                f"{camera}_beauty": new_beauty
            })
            beauty_output_frames.update(beauty_output)
        return beauty_output_frames

    def get_multiple_aovs(self, outputs, cameras):
        renderer_class = get_current_renderer()
        renderer = str(renderer_class).split(":")[0]
        aovs_frames = {}
        for output, camera in zip(outputs, cameras):
            filename, ext = os.path.splitext(output)
            filename = filename.replace(".", "")
            ext = ext.replace(".", "")
            start_frame = int(rt.rendStart)
            end_frame = int(rt.rendEnd) + 1

            if renderer in [
                "ART_Renderer",
                "V_Ray_6_Hotfix_3",
                "V_Ray_GPU_6_Hotfix_3",
                "Default_Scanline_Renderer",
                "Quicksilver_Hardware_Renderer",
            ]:
                render_name = self.get_render_elements_name()
                if render_name:
                    for name in render_name:
                        aovs_frames.update({
                            f"{camera}_{name}": self.get_expected_aovs(
                                filename, name, start_frame,
                                end_frame, ext)
                        })
            elif renderer == "Redshift_Renderer":
                render_name = self.get_render_elements_name()
                if render_name:
                    rs_aov_files = rt.Execute("renderers.current.separateAovFiles")  # noqa
                    # this doesn't work, always returns False
                    # rs_AovFiles = rt.RedShift_Renderer().separateAovFiles
                    if ext == "exr" and not rs_aov_files:
                        for name in render_name:
                            if name == "RsCryptomatte":
                                aovs_frames.update({
                                    f"{camera}_{name}": self.get_expected_aovs(
                                        filename, name, start_frame,
                                        end_frame, ext)
                                })
                    else:
                        for name in render_name:
                            aovs_frames.update({
                                f"{camera}_{name}": self.get_expected_aovs(
                                    filename, name, start_frame,
                                    end_frame, ext)
                            })
            elif renderer == "Arnold":
                render_name = self.get_arnold_product_name()
                if render_name:
                    for name in render_name:
                        aovs_frames.update({
                            f"{camera}_{name}": self.get_expected_arnold_product(  # noqa
                                filename, name, start_frame,
                                end_frame, ext)
                        })
            elif renderer in [
                "V_Ray_6_Hotfix_3",
                "V_Ray_GPU_6_Hotfix_3"
            ]:
                if ext != "exr":
                    render_name = self.get_render_elements_name()
                    if render_name:
                        for name in render_name:
                            aovs_frames.update({
                                f"{camera}_{name}": self.get_expected_aovs(
                                    filename, name, start_frame,
                                    end_frame, ext)
                            })

        return aovs_frames

    def get_aovs(self, container):
        render_dir = os.path.dirname(rt.rendOutputFilename)

        output_file = os.path.join(render_dir,
                                   container)

        setting = self._project_settings
        img_fmt = setting["max"]["RenderSettings"]["image_format"]  # noqa

        start_frame = int(rt.rendStart)
        end_frame = int(rt.rendEnd) + 1
        renderer_class = get_current_renderer()
        renderer = str(renderer_class).split(":")[0]
        render_dict = {}

        if renderer in [
            "ART_Renderer",
            "V_Ray_6_Hotfix_3",
            "V_Ray_GPU_6_Hotfix_3",
            "Default_Scanline_Renderer",
            "Quicksilver_Hardware_Renderer",
        ]:
            render_name = self.get_render_elements_name()
            if render_name:
                for name in render_name:
                    render_dict.update({
                        name: self.get_expected_aovs(
                            output_file, name, start_frame,
                            end_frame, img_fmt)
                    })
        elif renderer == "Redshift_Renderer":
            render_name = self.get_render_elements_name()
            if render_name:
                rs_aov_files = rt.Execute("renderers.current.separateAovFiles")
                # this doesn't work, always returns False
                # rs_AovFiles = rt.RedShift_Renderer().separateAovFiles
                if img_fmt == "exr" and not rs_aov_files:
                    for name in render_name:
                        if name == "RsCryptomatte":
                            render_dict.update({
                                name: self.get_expected_aovs(
                                    output_file, name, start_frame,
                                    end_frame, img_fmt)
                            })
                else:
                    for name in render_name:
                        render_dict.update({
                            name: self.get_expected_aovs(
                                output_file, name, start_frame,
                                end_frame, img_fmt)
                        })

        elif renderer == "Arnold":
            render_name = self.get_arnold_product_name()
            if render_name:
                for name in render_name:
                    render_dict.update({
                        name: self.get_expected_arnold_product(
                            output_file, name, start_frame,
                            end_frame, img_fmt)
                    })
        elif renderer in [
            "V_Ray_6_Hotfix_3",
            "V_Ray_GPU_6_Hotfix_3"
        ]:
            if img_fmt != "exr":
                render_name = self.get_render_elements_name()
                if render_name:
                    for name in render_name:
                        render_dict.update({
                            name: self.get_expected_aovs(
                                output_file, name, start_frame,
                                end_frame, img_fmt)  # noqa
                        })

        return render_dict

    def get_expected_beauty(self, folder, start_frame, end_frame, fmt):
        beauty_frame_range = []
        for f in range(start_frame, end_frame):
            frame = "%04d" % f
            beauty_output = f"{folder}.{frame}.{fmt}"
            beauty_output = beauty_output.replace("\\", "/")
            beauty_frame_range.append(beauty_output)

        return beauty_frame_range

    def get_arnold_product_name(self):
        """Get all the Arnold AOVs name"""
        aov_name = []

        amw = rt.MaxToAOps.AOVsManagerWindow()
        aov_mgr = rt.renderers.current.AOVManager
        # Check if there is any aov group set in AOV manager
        aov_group_num = len(aov_mgr.drivers)
        if aov_group_num < 1:
            return
        for i in range(aov_group_num):
            # get the specific AOV group
            aov_name.extend(aov.name for aov in aov_mgr.drivers[i].aov_list)
        # close the AOVs manager window
        amw.close()

        return aov_name

    def get_expected_arnold_product(self, folder, name,
                                    start_frame, end_frame, fmt):
        """Get all the expected Arnold AOVs"""
        aov_list = []
        for f in range(start_frame, end_frame):
            frame = "%04d" % f
            render_element = f"{folder}_{name}.{frame}.{fmt}"
            render_element = render_element.replace("\\", "/")
            aov_list.append(render_element)

        return aov_list

    def get_render_elements_name(self):
        """Get all the render element names in general"""
        render_name = []
        render_elem = rt.maxOps.GetCurRenderElementMgr()
        render_elem_num = render_elem.NumRenderElements()
        if render_elem_num < 1:
            return
        # get render elements from the renders
        for i in range(render_elem_num):
            renderlayer_name = render_elem.GetRenderElement(i)
            if renderlayer_name.enabled:
                target, renderpass = str(renderlayer_name).split(":")
                render_name.append(renderpass)

        return render_name

    def get_expected_aovs(self, folder, name,
                          start_frame, end_frame, fmt):
        """Get all the expected render element output files."""
        render_elements = []
        for f in range(start_frame, end_frame):
            frame = "%04d" % f
            render_element = f"{folder}_{name}.{frame}.{fmt}"
            render_element = render_element.replace("\\", "/")
            render_elements.append(render_element)

        return render_elements

    def image_format(self):
        return self._project_settings["max"]["RenderSettings"]["image_format"]  # noqa
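Note that the expected-file helpers only build names; nothing is rendered. For illustration, with assumed inputs (the path and AOV name are made up) inside a configured session:

    products = RenderProducts()
    # Two frames of a hypothetical Redshift cryptomatte element:
    expected = products.get_expected_aovs(
        "C:/renders/shot010", "RsCryptomatte", 1001, 1003, "exr")
    # -> ["C:/renders/shot010_RsCryptomatte.1001.exr",
    #     "C:/renders/shot010_RsCryptomatte.1002.exr"]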
@ -1,227 +0,0 @@
import os
from pymxs import runtime as rt
from ayon_core.lib import Logger
from ayon_core.settings import get_project_settings
from ayon_core.pipeline import get_current_project_name
from ayon_core.pipeline.context_tools import get_current_folder_entity

from ayon_max.api.lib import (
    set_render_frame_range,
    get_current_renderer,
    get_default_render_folder
)


class RenderSettings(object):

    log = Logger.get_logger("RenderSettings")

    _aov_chars = {
        "dot": ".",
        "dash": "-",
        "underscore": "_"
    }

    def __init__(self, project_settings=None):
        """
        Set up the naming convention for the render
        elements for the deadline submission
        """

        self._project_settings = project_settings
        if not self._project_settings:
            self._project_settings = get_project_settings(
                get_current_project_name()
            )

    def set_render_camera(self, selection):
        for sel in selection:
            # to avoid Attribute Error from pymxs wrapper
            if rt.classOf(sel) in rt.Camera.classes:
                rt.viewport.setCamera(sel)
                return
        raise RuntimeError("Active Camera not found")

    def render_output(self, container):
        folder = rt.maxFilePath
        # hard-coded, should be customized in the setting
        file = rt.maxFileName
        folder = folder.replace("\\", "/")
        # hard-coded, set the renderoutput path
        setting = self._project_settings
        render_folder = get_default_render_folder(setting)
        filename, ext = os.path.splitext(file)
        output_dir = os.path.join(folder,
                                  render_folder,
                                  filename)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        # hard-coded, should be customized in the setting
        folder_attributes = get_current_folder_entity()["attrib"]

        # get project resolution
        width = folder_attributes.get("resolutionWidth")
        height = folder_attributes.get("resolutionHeight")
        # Set Frame Range
        frame_start = folder_attributes.get("frame_start")
        frame_end = folder_attributes.get("frame_end")
        set_render_frame_range(frame_start, frame_end)
        # get the production renderer
        renderer_class = get_current_renderer()
        renderer = str(renderer_class).split(":")[0]

        img_fmt = self._project_settings["max"]["RenderSettings"]["image_format"]  # noqa
        output = os.path.join(output_dir, container)
        try:
            aov_separator = self._aov_chars[(
                self._project_settings["max"]
                                      ["RenderSettings"]
                                      ["aov_separator"]
            )]
        except KeyError:
            aov_separator = "."
        output_filename = f"{output}..{img_fmt}"
        output_filename = output_filename.replace("{aov_separator}",
                                                  aov_separator)
        rt.rendOutputFilename = output_filename
        if renderer == "VUE_File_Renderer":
            return
        # TODO: Finish the arnold render setup
        if renderer == "Arnold":
            self.arnold_setup()

        if renderer in [
            "ART_Renderer",
            "Redshift_Renderer",
            "V_Ray_6_Hotfix_3",
            "V_Ray_GPU_6_Hotfix_3",
            "Default_Scanline_Renderer",
            "Quicksilver_Hardware_Renderer",
        ]:
            self.render_element_layer(output, width, height, img_fmt)

        rt.rendSaveFile = True

        if rt.renderSceneDialog.isOpen():
            rt.renderSceneDialog.close()

    def arnold_setup(self):
        # get Arnold RenderView run in the background
        # for setting up renderable camera
        arv = rt.MAXToAOps.ArnoldRenderView()
        render_camera = rt.viewport.GetCamera()
        if render_camera:
            arv.setOption("Camera", str(render_camera))

        # TODO: add AOVs and extension
        img_fmt = self._project_settings["max"]["RenderSettings"]["image_format"]  # noqa
        setup_cmd = (
            f"""
        amw = MaxtoAOps.AOVsManagerWindow()
        amw.close()
        aovmgr = renderers.current.AOVManager
        aovmgr.drivers = #()
        img_fmt = "{img_fmt}"
        if img_fmt == "png" then driver = ArnoldPNGDriver()
        if img_fmt == "jpg" then driver = ArnoldJPEGDriver()
        if img_fmt == "exr" then driver = ArnoldEXRDriver()
        if img_fmt == "tif" then driver = ArnoldTIFFDriver()
        if img_fmt == "tiff" then driver = ArnoldTIFFDriver()
        append aovmgr.drivers driver
        aovmgr.drivers[1].aov_list = #()
        """)

        rt.execute(setup_cmd)
        arv.close()

    def render_element_layer(self, dir, width, height, ext):
        """For Renderers with render elements"""
        rt.renderWidth = width
        rt.renderHeight = height
        render_elem = rt.maxOps.GetCurRenderElementMgr()
        render_elem_num = render_elem.NumRenderElements()
        if render_elem_num < 0:
            return

        for i in range(render_elem_num):
            renderlayer_name = render_elem.GetRenderElement(i)
            target, renderpass = str(renderlayer_name).split(":")
            aov_name = f"{dir}_{renderpass}..{ext}"
            render_elem.SetRenderElementFileName(i, aov_name)

    def get_render_output(self, container, output_dir):
        output = os.path.join(output_dir, container)
        img_fmt = self._project_settings["max"]["RenderSettings"]["image_format"]  # noqa
        output_filename = f"{output}..{img_fmt}"
        return output_filename

    def get_render_element(self):
        orig_render_elem = []
        render_elem = rt.maxOps.GetCurRenderElementMgr()
        render_elem_num = render_elem.NumRenderElements()
        if render_elem_num < 0:
            return

        for i in range(render_elem_num):
            render_element = render_elem.GetRenderElementFilename(i)
            orig_render_elem.append(render_element)

        return orig_render_elem

    def get_batch_render_elements(self, container,
                                  output_dir, camera):
        render_element_list = list()
        output = os.path.join(output_dir, container)
        render_elem = rt.maxOps.GetCurRenderElementMgr()
        render_elem_num = render_elem.NumRenderElements()
        if render_elem_num < 0:
            return
        img_fmt = self._project_settings["max"]["RenderSettings"]["image_format"]  # noqa

        for i in range(render_elem_num):
            renderlayer_name = render_elem.GetRenderElement(i)
            target, renderpass = str(renderlayer_name).split(":")
            aov_name = f"{output}_{camera}_{renderpass}..{img_fmt}"
            render_element_list.append(aov_name)
        return render_element_list

    def get_batch_render_output(self, camera):
        target_layer_no = rt.batchRenderMgr.FindView(camera)
        target_layer = rt.batchRenderMgr.GetView(target_layer_no)
        return target_layer.outputFilename

    def batch_render_elements(self, camera):
        target_layer_no = rt.batchRenderMgr.FindView(camera)
        target_layer = rt.batchRenderMgr.GetView(target_layer_no)
        outputfilename = target_layer.outputFilename
        directory = os.path.dirname(outputfilename)
        render_elem = rt.maxOps.GetCurRenderElementMgr()
        render_elem_num = render_elem.NumRenderElements()
        if render_elem_num < 0:
            return
        ext = self._project_settings["max"]["RenderSettings"]["image_format"]  # noqa

        for i in range(render_elem_num):
            renderlayer_name = render_elem.GetRenderElement(i)
            target, renderpass = str(renderlayer_name).split(":")
            aov_name = f"{directory}_{camera}_{renderpass}..{ext}"
            render_elem.SetRenderElementFileName(i, aov_name)

    def batch_render_layer(self, container,
                           output_dir, cameras):
        outputs = list()
        output = os.path.join(output_dir, container)
        img_fmt = self._project_settings["max"]["RenderSettings"]["image_format"]  # noqa
        for cam in cameras:
            camera = rt.getNodeByName(cam)
            layer_no = rt.batchRenderMgr.FindView(cam)
            renderlayer = None
            if layer_no == 0:
                renderlayer = rt.batchRenderMgr.CreateView(camera)
            else:
                renderlayer = rt.batchRenderMgr.GetView(layer_no)
            # use camera name as renderlayer name
            renderlayer.name = cam
            renderlayer.outputFilename = f"{output}_{cam}..{img_fmt}"
            outputs.append(renderlayer.outputFilename)
        return outputs
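A worked naming example for the render element templates above (the values are illustrative, not from the removed code):

    dir_, renderpass, ext = "C:/proj/renders/scene", "VRayExtraTex", "exr"
    aov_name = f"{dir_}_{renderpass}..{ext}"
    # -> "C:/proj/renders/scene_VRayExtraTex..exr"
    # 3dsMax writes the padded frame number between the two dots at
    # render time.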
|
@ -1,167 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""3dsmax menu definition of AYON."""
|
||||
import os
|
||||
from qtpy import QtWidgets, QtCore
|
||||
from pymxs import runtime as rt
|
||||
|
||||
from ayon_core.tools.utils import host_tools
|
||||
from ayon_max.api import lib
|
||||
|
||||
|
||||
class AYONMenu(object):
|
||||
"""Object representing AYON menu.
|
||||
|
||||
This is using "hack" to inject itself before "Help" menu of 3dsmax.
|
||||
For some reason `postLoadingMenus` event doesn't fire, and main menu
|
||||
if probably re-initialized by menu templates, se we wait for at least
|
||||
1 event Qt event loop before trying to insert.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.main_widget = self.get_main_widget()
|
||||
self.menu = None
|
||||
|
||||
timer = QtCore.QTimer()
|
||||
# set number of event loops to wait.
|
||||
timer.setInterval(1)
|
||||
timer.timeout.connect(self._on_timer)
|
||||
timer.start()
|
||||
|
||||
self._timer = timer
|
||||
self._counter = 0
|
||||
|
||||
def _on_timer(self):
|
||||
if self._counter < 1:
|
||||
self._counter += 1
|
||||
return
|
||||
|
||||
self._counter = 0
|
||||
self._timer.stop()
|
||||
self._build_ayon_menu()
|
||||
|
||||
@staticmethod
|
||||
def get_main_widget():
|
||||
"""Get 3dsmax main window."""
|
||||
return QtWidgets.QWidget.find(rt.windows.getMAXHWND())
|
||||
|
||||
    def get_main_menubar(self) -> QtWidgets.QMenuBar:
        """Get the main menubar of the 3ds Max main window."""
        return self.main_widget.findChildren(QtWidgets.QMenuBar)[0]

    def _get_or_create_ayon_menu(
            self, name: str = "&AYON",
            before: str = "&Help") -> QtWidgets.QMenu:
        """Get the existing AYON menu or create it.

        Args:
            name (str, Optional): AYON menu name.
            before (str, Optional): Name of the 3ds Max main menu item to
                add the AYON menu before.

        Returns:
            QtWidgets.QMenu: AYON menu.

        """
        if self.menu is not None:
            return self.menu

        menu_bar = self.get_main_menubar()
        menu_items = menu_bar.findChildren(
            QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly)
        help_action = None
        for item in menu_items:
            if name in item.title():
                # we already have the AYON menu
                return item

            if before in item.title():
                help_action = item.menuAction()
        tab_menu_label = os.environ.get("AYON_MENU_LABEL") or "AYON"
        op_menu = QtWidgets.QMenu("&{}".format(tab_menu_label))
        menu_bar.insertMenu(help_action, op_menu)

        self.menu = op_menu
        return op_menu

    def _build_ayon_menu(self) -> QtWidgets.QMenu:
        """Build the items in the AYON menu."""
        ayon_menu = self._get_or_create_ayon_menu()
        load_action = QtWidgets.QAction("Load...", ayon_menu)
        load_action.triggered.connect(self.load_callback)
        ayon_menu.addAction(load_action)

        publish_action = QtWidgets.QAction("Publish...", ayon_menu)
        publish_action.triggered.connect(self.publish_callback)
        ayon_menu.addAction(publish_action)

        manage_action = QtWidgets.QAction("Manage...", ayon_menu)
        manage_action.triggered.connect(self.manage_callback)
        ayon_menu.addAction(manage_action)

        library_action = QtWidgets.QAction("Library...", ayon_menu)
        library_action.triggered.connect(self.library_callback)
        ayon_menu.addAction(library_action)

        ayon_menu.addSeparator()

        workfiles_action = QtWidgets.QAction("Work Files...", ayon_menu)
        workfiles_action.triggered.connect(self.workfiles_callback)
        ayon_menu.addAction(workfiles_action)

        ayon_menu.addSeparator()

        res_action = QtWidgets.QAction("Set Resolution", ayon_menu)
        res_action.triggered.connect(self.resolution_callback)
        ayon_menu.addAction(res_action)

        frame_action = QtWidgets.QAction("Set Frame Range", ayon_menu)
        frame_action.triggered.connect(self.frame_range_callback)
        ayon_menu.addAction(frame_action)

        colorspace_action = QtWidgets.QAction("Set Colorspace", ayon_menu)
        colorspace_action.triggered.connect(self.colorspace_callback)
        ayon_menu.addAction(colorspace_action)

        unit_scale_action = QtWidgets.QAction("Set Unit Scale", ayon_menu)
        unit_scale_action.triggered.connect(self.unit_scale_callback)
        ayon_menu.addAction(unit_scale_action)

        return ayon_menu

    def load_callback(self):
        """Callback to show the Loader tool."""
        host_tools.show_loader(parent=self.main_widget)

    def publish_callback(self):
        """Callback to show the Publisher tool."""
        host_tools.show_publisher(parent=self.main_widget)

    def manage_callback(self):
        """Callback to show the Scene Manager/Inventory tool."""
        host_tools.show_scene_inventory(parent=self.main_widget)

    def library_callback(self):
        """Callback to show the Library Loader tool."""
        host_tools.show_library_loader(parent=self.main_widget)

    def workfiles_callback(self):
        """Callback to show the Workfiles tool."""
        host_tools.show_workfiles(parent=self.main_widget)

    def resolution_callback(self):
        """Callback to reset the scene resolution."""
        return lib.reset_scene_resolution()

    def frame_range_callback(self):
        """Callback to reset the frame range."""
        return lib.reset_frame_range()

    def colorspace_callback(self):
        """Callback to reset the colorspace."""
        return lib.reset_colorspace()

    def unit_scale_callback(self):
        """Callback to reset the unit scale."""
        return lib.reset_unit_scale()
@@ -1,297 +0,0 @@
# -*- coding: utf-8 -*-
"""Pipeline tools for AYON 3ds max integration."""
import os
import json
import logging
from operator import attrgetter

import pyblish.api

from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
from ayon_core.pipeline import (
    register_creator_plugin_path,
    register_loader_plugin_path,
    AVALON_CONTAINER_ID,
    AYON_CONTAINER_ID,
)
from ayon_max.api.menu import AYONMenu
from ayon_max.api import lib
from ayon_max.api.plugin import MS_CUSTOM_ATTRIB
from ayon_max import MAX_HOST_DIR

from pymxs import runtime as rt  # noqa

log = logging.getLogger("ayon_max")

PLUGINS_DIR = os.path.join(MAX_HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

    name = "max"
    menu = None

    def __init__(self):
        super(MaxHost, self).__init__()
        self._op_events = {}
        self._has_been_setup = False

    def install(self):
        pyblish.api.register_host("max")

        pyblish.api.register_plugin_path(PUBLISH_PATH)
        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)

        # self._register_callbacks()
        self.menu = AYONMenu()

        self._has_been_setup = True

        rt.callbacks.addScript(rt.Name('systemPostNew'), on_new)

        rt.callbacks.addScript(rt.Name('filePostOpen'),
                               lib.check_colorspace)

        rt.callbacks.addScript(rt.Name('postWorkspaceChange'),
                               self._deferred_menu_creation)
        rt.NodeEventCallback(
            nameChanged=lib.update_modifier_node_names)

    def workfile_has_unsaved_changes(self):
        return rt.getSaveRequired()

    def get_workfile_extensions(self):
        return [".max"]

    def save_workfile(self, dst_path=None):
        rt.saveMaxFile(dst_path)
        return dst_path

    def open_workfile(self, filepath):
        rt.checkForSave()
        rt.loadMaxFile(filepath)
        return filepath

    def get_current_workfile(self):
        return os.path.join(rt.maxFilePath, rt.maxFileName)

    def get_containers(self):
        return ls()

    def _register_callbacks(self):
        rt.callbacks.removeScripts(id=rt.name("OpenPypeCallbacks"))

        rt.callbacks.addScript(
            rt.Name("postLoadingMenus"),
            self._deferred_menu_creation, id=rt.Name('OpenPypeCallbacks'))

    def _deferred_menu_creation(self):
        self.log.info("Building menu ...")
        self.menu = AYONMenu()

    @staticmethod
    def create_context_node():
        """Helper for creating the context holding node."""

        root_scene = rt.rootScene

        create_attr_script = ("""
attributes "OpenPypeContext"
(
    parameters main rollout:params
    (
        context type: #string
    )

    rollout params "OpenPype Parameters"
    (
        editText editTextContext "Context" type: #string
    )
)
        """)

        attr = rt.execute(create_attr_script)
        rt.custAttributes.add(root_scene, attr)

        return root_scene.OpenPypeContext.context

    def update_context_data(self, data, changes):
        try:
            _ = rt.rootScene.OpenPypeContext.context
        except AttributeError:
            # context node doesn't exist yet
            self.create_context_node()

        rt.rootScene.OpenPypeContext.context = json.dumps(data)

    def get_context_data(self):
        try:
            context = rt.rootScene.OpenPypeContext.context
        except AttributeError:
            # context node doesn't exist yet
            context = self.create_context_node()
        if not context:
            context = "{}"
        return json.loads(context)

    def save_file(self, dst_path=None):
        # Force forward slashes to avoid a segfault
        dst_path = dst_path.replace("\\", "/")
        rt.saveMaxFile(dst_path)
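

# Illustrative round trip of the context data handled above. This is a
# usage sketch, not part of the host integration; it assumes a live
# 3ds Max session in which `MaxHost` has been installed.
def _example_context_data_roundtrip():
    host = MaxHost()
    host.update_context_data({"project_name": "demo_project"}, changes={})
    # The data survives in the scene root's "OpenPypeContext" attribute
    # as a JSON string and can be read back at any time.
    return host.get_context_data()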


def parse_container(container):
    """Return the container node's full container data.

    Args:
        container (rt.MXSWrapperBase): A container node.

    Returns:
        dict: The container schema data for this container node.

    """
    data = lib.read(container)

    # Backwards compatibility pre-schemas for containers
    data["schema"] = data.get("schema", "openpype:container-3.0")

    # Append transient data
    data["objectName"] = container.Name
    return data


def ls():
    """Yield all AYON containers in the scene."""
    objs = rt.objects
    containers = [
        obj for obj in objs
        if rt.getUserProp(obj, "id") in {
            AYON_CONTAINER_ID, AVALON_CONTAINER_ID
        }
    ]

    for container in sorted(containers, key=attrgetter("name")):
        yield parse_container(container)
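

# Usage sketch for `ls()` above: collect the node names of all loaded
# containers in the current scene (illustrative only; assumes the scene
# already holds AYON containers).
def _loaded_container_names():
    return [container["objectName"] for container in ls()]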


def on_new():
    lib.set_context_setting()
    if rt.checkForSave():
        rt.resetMaxFile(rt.Name("noPrompt"))
        rt.clearUndoBuffer()
        rt.redrawViews()


def containerise(name: str, nodes: list, context,
                 namespace=None, loader=None, suffix="_CON"):
    data = {
        "schema": "openpype:container-2.0",
        "id": AVALON_CONTAINER_ID,
        "name": name,
        "namespace": namespace or "",
        "loader": loader,
        "representation": context["representation"]["id"],
    }
    container_name = f"{namespace}:{name}{suffix}"
    container = rt.container(name=container_name)
    import_custom_attribute_data(container, nodes)
    if not lib.imprint(container_name, data):
        print(f"Imprinting of {container_name} failed.")
    return container


def load_custom_attribute_data():
    """Re-load the AYON custom attribute definition built by the creator.

    Returns:
        attribute: the custom OP attribute definition set in MaxScript.
    """
    return rt.Execute(MS_CUSTOM_ATTRIB)


def import_custom_attribute_data(container, selections: list):
    """Import the OpenPype/AYON custom attributes built by the creator.

    Args:
        container (rt.Container): target container to which custom
            attributes are added.
        selections (list): nodes to be added into the group in
            custom attributes.
    """
    attrs = load_custom_attribute_data()
    modifier = rt.EmptyModifier()
    rt.addModifier(container, modifier)
    container.modifiers[0].name = "OP Data"
    rt.custAttributes.add(container.modifiers[0], attrs)
    node_list = []
    sel_list = []
    for i in selections:
        node_ref = rt.NodeTransformMonitor(node=i)
        node_list.append(node_ref)
        sel_list.append(str(i))

    # Setting the property
    rt.setProperty(
        container.modifiers[0].openPypeData,
        "all_handles", node_list)
    rt.setProperty(
        container.modifiers[0].openPypeData,
        "sel_list", sel_list)


def update_custom_attribute_data(container, selections: list):
    """Update the AYON custom attributes built by the creator.

    Args:
        container (rt.Container): target container to which custom
            attributes are added.
        selections (list): nodes to be added into the group in
            custom attributes.
    """
    if container.modifiers[0].name == "OP Data":
        rt.deleteModifier(container, container.modifiers[0])
    import_custom_attribute_data(container, selections)


def get_previous_loaded_object(container):
    """Get previously loaded objects through the OP data.

    Args:
        container (rt.Container): the container which stores the OP data.

    Returns:
        node_list (list): list of nodes which were previously loaded.
    """
    node_list = []
    node_transform_monitor_list = rt.getProperty(
        container.modifiers[0].openPypeData, "all_handles")
    for node_transform_monitor in node_transform_monitor_list:
        node_list.append(node_transform_monitor.node)
    return node_list


def remove_container_data(container_node):
    """Remove container data after updating, switching or deleting it.

    Args:
        container_node (rt.Container): container node
    """
    if container_node.modifiers[0].name == "OP Data":
        all_set_members_names = [
            member.node for member
            in container_node.modifiers[0].openPypeData.all_handles]
        # clean up the children of alembic dummy objects
        for current_set_member in all_set_members_names:
            shape_list = [members for members in current_set_member.Children
                          if rt.ClassOf(members) == rt.AlembicObject
                          or rt.isValidNode(members)]
            if shape_list:  # noqa
                rt.Delete(shape_list)
            rt.Delete(current_set_member)
        rt.deleteModifier(container_node, container_node.modifiers[0])

    rt.Delete(container_node)
    rt.redrawViews()
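

# End-to-end sketch of the container helpers above: group freshly loaded
# nodes into a container, query and refresh its tracked members, and
# finally remove it. `nodes` and `context` are placeholder values for
# illustration; `context` must hold a representation id as used by
# `containerise`.
def _example_container_lifecycle(nodes, context):
    container = containerise(
        "exampleModel", nodes, context, namespace="exampleModel_01")
    # the members tracked on the "OP Data" modifier can be queried back
    members = get_previous_loaded_object(container)
    # on a version switch the member list would be refreshed like this
    update_custom_attribute_data(container, members)
    # finally drop the container and its tracked members
    remove_container_data(container)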
@@ -1,298 +0,0 @@
# -*- coding: utf-8 -*-
"""3dsmax specific AYON/Pyblish plugin definitions."""
from abc import ABCMeta

import six
from pymxs import runtime as rt

from ayon_core.lib import BoolDef
from ayon_core.pipeline import (
    CreatedInstance,
    Creator,
    CreatorError,
    AYON_INSTANCE_ID,
    AVALON_INSTANCE_ID,
)

from .lib import imprint, lsattr, read

MS_CUSTOM_ATTRIB = """attributes "openPypeData"
(
    parameters main rollout:OPparams
    (
        all_handles type:#maxObjectTab tabSize:0 tabSizeVariable:on
        sel_list type:#stringTab tabSize:0 tabSizeVariable:on
    )

    rollout OPparams "OP Parameters"
    (
        listbox list_node "Node References" items:#()
        button button_add "Add to Container"
        button button_del "Delete from Container"

        fn node_to_name the_node =
        (
            handle = the_node.handle
            obj_name = the_node.name
            handle_name = obj_name + "<" + handle as string + ">"
            return handle_name
        )
        fn nodes_to_add node =
        (
            sceneObjs = #()
            if classOf node == Container do return false
            n = node as string
            for obj in Objects do
            (
                tmp_obj = obj as string
                append sceneObjs tmp_obj
            )
            if sel_list != undefined do
            (
                for obj in sel_list do
                (
                    idx = findItem sceneObjs obj
                    if idx do
                    (
                        deleteItem sceneObjs idx
                    )
                )
            )
            idx = findItem sceneObjs n
            if idx then return true else false
        )

        fn nodes_to_rmv node =
        (
            n = node as string
            idx = findItem sel_list n
            if idx then return true else false
        )

        on button_add pressed do
        (
            current_sel = selectByName title:"Select Objects to add to
            the Container" buttontext:"Add" filter:nodes_to_add
            if current_sel == undefined then return False
            temp_arr = #()
            i_node_arr = #()
            for c in current_sel do
            (
                handle_name = node_to_name c
                node_ref = NodeTransformMonitor node:c
                idx = finditem list_node.items handle_name
                if idx do (
                    continue
                )
                name = c as string
                append temp_arr handle_name
                append i_node_arr node_ref
                append sel_list name
            )
            all_handles = join i_node_arr all_handles
            list_node.items = join temp_arr list_node.items
        )

        on button_del pressed do
        (
            current_sel = selectByName title:"Select Objects to remove
            from the Container" buttontext:"Remove" filter: nodes_to_rmv
            if current_sel == undefined or current_sel.count == 0 then
            (
                return False
            )
            temp_arr = #()
            i_node_arr = #()
            new_i_node_arr = #()
            new_temp_arr = #()

            for c in current_sel do
            (
                node_ref = NodeTransformMonitor node:c as string
                handle_name = node_to_name c
                n = c as string
                tmp_all_handles = #()
                for i in all_handles do
                (
                    tmp = i as string
                    append tmp_all_handles tmp
                )
                idx = finditem tmp_all_handles node_ref
                if idx do
                (
                    new_i_node_arr = DeleteItem all_handles idx
                )
                idx = finditem list_node.items handle_name
                if idx do
                (
                    new_temp_arr = DeleteItem list_node.items idx
                )
                idx = finditem sel_list n
                if idx do
                (
                    sel_list = DeleteItem sel_list idx
                )
            )
            all_handles = join i_node_arr new_i_node_arr
            list_node.items = join temp_arr new_temp_arr
        )

        on OPparams open do
        (
            if all_handles.count != 0 then
            (
                temp_arr = #()
                for x in all_handles do
                (
                    if x.node == undefined do continue
                    handle_name = node_to_name x.node
                    append temp_arr handle_name
                )
                list_node.items = temp_arr
            )
        )
    )
)"""


class MaxCreatorBase(object):

    @staticmethod
    def cache_instance_data(shared_data):
        if shared_data.get("max_cached_instances") is not None:
            return shared_data

        shared_data["max_cached_instances"] = {}

        cached_instances = []
        for id_type in [AYON_INSTANCE_ID, AVALON_INSTANCE_ID]:
            cached_instances.extend(lsattr("id", id_type))

        for i in cached_instances:
            creator_id = rt.GetUserProp(i, "creator_identifier")
            if creator_id not in shared_data["max_cached_instances"]:
                shared_data["max_cached_instances"][creator_id] = [i.name]
            else:
                shared_data[
                    "max_cached_instances"][creator_id].append(i.name)
        return shared_data

    @staticmethod
    def create_instance_node(node):
        """Create an instance node.

        If the supplied node is an existing node, it will be used to hold
        the instance, otherwise a new node of type Container is created.

        Args:
            node (rt.MXSWrapperBase, str): Node or node name to use.

        Returns:
            instance
        """
        if isinstance(node, str):
            node = rt.Container(name=node)

        attrs = rt.Execute(MS_CUSTOM_ATTRIB)
        modifier = rt.EmptyModifier()
        rt.addModifier(node, modifier)
        node.modifiers[0].name = "OP Data"
        rt.custAttributes.add(node.modifiers[0], attrs)

        return node
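

# Sketch: reading the tracked members back from the "OP Data" modifier
# that `MaxCreatorBase.create_instance_node` attaches. Illustrative only;
# `node` is expected to be a container produced by that method.
def _read_instance_members(node):
    op_data = node.modifiers[0].openPypeData
    return [monitor.node for monitor in op_data.all_handles]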


@six.add_metaclass(ABCMeta)
class MaxCreator(Creator, MaxCreatorBase):
    selected_nodes = []

    def create(self, product_name, instance_data, pre_create_data):
        if pre_create_data.get("use_selection"):
            self.selected_nodes = rt.GetCurrentSelection()
        if rt.getNodeByName(product_name):
            raise CreatorError(f"'{product_name}' is already created.")

        instance_node = self.create_instance_node(product_name)
        instance_data["instance_node"] = instance_node.name
        instance = CreatedInstance(
            self.product_type,
            product_name,
            instance_data,
            self
        )
        if pre_create_data.get("use_selection"):
            node_list = []
            sel_list = []
            for i in self.selected_nodes:
                node_ref = rt.NodeTransformMonitor(node=i)
                node_list.append(node_ref)
                sel_list.append(str(i))

            # Setting the property
            rt.setProperty(
                instance_node.modifiers[0].openPypeData,
                "all_handles", node_list)
            rt.setProperty(
                instance_node.modifiers[0].openPypeData,
                "sel_list", sel_list)

        self._add_instance_to_context(instance)
        imprint(instance_node.name, instance.data_to_store())

        return instance

    def collect_instances(self):
        self.cache_instance_data(self.collection_shared_data)
        for instance in self.collection_shared_data["max_cached_instances"].get(self.identifier, []):  # noqa
            created_instance = CreatedInstance.from_existing(
                read(rt.GetNodeByName(instance)), self
            )
            self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        for created_inst, changes in update_list:
            instance_node = created_inst.get("instance_node")
            new_values = {
                key: changes[key].new_value
                for key in changes.changed_keys
            }
            product_name = new_values.get("productName", "")
            if product_name and instance_node != product_name:
                node = rt.getNodeByName(instance_node)
                new_product_name = new_values["productName"]
                if rt.getNodeByName(new_product_name):
                    raise CreatorError(
                        "The product '{}' already exists.".format(
                            new_product_name))
                instance_node = new_product_name
                created_inst["instance_node"] = instance_node
                node.name = instance_node

            imprint(
                instance_node,
                created_inst.data_to_store(),
            )

    def remove_instances(self, instances):
        """Remove specified instances from the scene.

        This deletes the instance nodes together with their custom
        attributes and removes the instances from the publishing context.

        """
        for instance in instances:
            instance_node = rt.GetNodeByName(
                instance.data.get("instance_node"))
            if instance_node:
                count = rt.custAttributes.count(instance_node.modifiers[0])
                rt.custAttributes.delete(instance_node.modifiers[0], count)
                rt.Delete(instance_node)

            self._remove_instance_from_context(instance)

    def get_pre_create_attr_defs(self):
        return [
            BoolDef("use_selection", label="Use selection")
        ]
@@ -1,344 +0,0 @@
import logging
import contextlib
from pymxs import runtime as rt
from .lib import get_max_version, render_resolution

log = logging.getLogger("ayon_max")


@contextlib.contextmanager
def play_preview_when_done(has_autoplay):
    """Set the preview playback option during the context.

    Args:
        has_autoplay (bool): autoplay the preview animation after
            it is created.
    """
    current_playback = rt.preferences.playPreviewWhenDone
    try:
        rt.preferences.playPreviewWhenDone = has_autoplay
        yield
    finally:
        rt.preferences.playPreviewWhenDone = current_playback
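

# Usage sketch for the context manager above: temporarily disable the
# "play preview when done" preference while a preview job runs, restoring
# the user's setting afterwards. `create_preview_fn` is a placeholder for
# any callable that creates a preview.
def _create_preview_without_autoplay(create_preview_fn):
    with play_preview_when_done(False):
        return create_preview_fn()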


@contextlib.contextmanager
def viewport_layout_and_camera(camera, layout="layout_1"):
    """Set the viewport layout and camera during the context.

    ***For 3dsMax 2024+

    Args:
        camera (str): viewport camera
        layout (str): layout to use in the viewport, defaults to
            `layout_1`. Use None to keep the viewport layout unchanged
            during the context.
    """
    needs_maximize = False
    # Set to the first active non-extended viewport
    rt.viewport.activeViewportEx(1)
    original_camera = rt.viewport.getCamera()
    original_type = rt.viewport.getType()
    review_camera = rt.getNodeByName(camera)

    try:
        if rt.viewport.getLayout() != rt.name(layout):
            rt.execute("max tool maximize")
            needs_maximize = True
        rt.viewport.setCamera(review_camera)
        yield
    finally:
        if needs_maximize:
            rt.execute("max tool maximize")
        if original_type == rt.Name("view_camera"):
            rt.viewport.setCamera(original_camera)
        else:
            rt.viewport.setType(original_type)


@contextlib.contextmanager
def viewport_preference_setting(general_viewport,
                                nitrous_manager,
                                nitrous_viewport,
                                vp_button_mgr):
    """Set viewport settings during the context.

    ***For Max Version < 2024

    Args:
        general_viewport (dict): General viewport settings
        nitrous_manager (dict): Nitrous graphics manager settings
        nitrous_viewport (dict): Nitrous settings for the
            preview animation
        vp_button_mgr (dict): Viewport button manager settings
    """
    orig_vp_grid = rt.viewport.getGridVisibility(1)
    orig_vp_bkg = rt.viewport.IsSolidBackgroundColorMode()

    nitrousGraphicMgr = rt.NitrousGraphicsManager
    viewport_setting = nitrousGraphicMgr.GetActiveViewportSetting()
    vp_button_mgr_original = {
        key: getattr(rt.ViewportButtonMgr, key) for key in vp_button_mgr
    }
    nitrous_manager_original = {
        key: getattr(nitrousGraphicMgr, key) for key in nitrous_manager
    }
    nitrous_viewport_original = {
        key: getattr(viewport_setting, key) for key in nitrous_viewport
    }

    try:
        rt.viewport.setGridVisibility(1, general_viewport["dspGrid"])
        rt.viewport.EnableSolidBackgroundColorMode(general_viewport["dspBkg"])
        for key, value in vp_button_mgr.items():
            setattr(rt.ViewportButtonMgr, key, value)
        for key, value in nitrous_manager.items():
            setattr(nitrousGraphicMgr, key, value)
        for key, value in nitrous_viewport.items():
            if nitrous_viewport[key] != nitrous_viewport_original[key]:
                setattr(viewport_setting, key, value)
        yield

    finally:
        rt.viewport.setGridVisibility(1, orig_vp_grid)
        rt.viewport.EnableSolidBackgroundColorMode(orig_vp_bkg)
        for key, value in vp_button_mgr_original.items():
            setattr(rt.ViewportButtonMgr, key, value)
        for key, value in nitrous_manager_original.items():
            setattr(nitrousGraphicMgr, key, value)
        for key, value in nitrous_viewport_original.items():
            setattr(viewport_setting, key, value)


def _render_preview_animation_max_2024(
        filepath, start, end, percentSize, ext, viewport_options):
    """Render the viewport preview with MaxScript using `CreatePreview`.

    ****For 3dsMax 2024+

    Args:
        filepath (str): filepath for the render output without frame
            number and extension, for example: /path/to/file
        start (int): start frame
        end (int): end frame
        percentSize (float): render resolution multiplier by 100,
            e.g. 100.0 is 1x, 50.0 is 0.5x, 150.0 is 1.5x
        viewport_options (dict): viewport setting options, e.g.
            {"vpStyle": "defaultshading", "vpPreset": "highquality"}

    Returns:
        list: Created files
    """
    # the percentSize argument must be an integer
    percent = int(percentSize)
    filepath = filepath.replace("\\", "/")
    # the double dot is intentional: 3ds Max inserts the frame number
    # between the dots
    preview_output = f"{filepath}..{ext}"
    frame_template = f"{filepath}.{{:04d}}.{ext}"
    job_args = []
    for key, value in viewport_options.items():
        if isinstance(value, bool):
            if value:
                job_args.append(f"{key}:{value}")
        elif isinstance(value, str):
            if key == "vpStyle":
                if value == "Realistic":
                    value = "defaultshading"
                elif value == "Shaded":
                    log.warning(
                        "'Shaded' Mode not supported in "
                        "preview animation in Max 2024.\n"
                        "Using 'defaultshading' instead.")
                    value = "defaultshading"
                elif value == "ConsistentColors":
                    value = "flatcolor"
                else:
                    value = value.lower()
            elif key == "vpPreset":
                if value == "Quality":
                    value = "highquality"
                elif value == "Customize":
                    value = "userdefined"
                else:
                    value = value.lower()
            job_args.append(f"{key}: #{value}")

    job_str = (
        f'CreatePreview filename:"{preview_output}" outputAVI:false '
        f"percentSize:{percent} start:{start} end:{end} "
        f"{' '.join(job_args)} "
        "autoPlay:false"
    )
    rt.completeRedraw()
    rt.execute(job_str)
    # Return the created files
    return [frame_template.format(frame) for frame in range(start, end + 1)]


def _render_preview_animation_max_pre_2024(
        filepath, startFrame, endFrame,
        width, height, percentSize, ext):
    """Render the viewport animation by creating bitmaps.

    ***For 3dsMax Version < 2024

    Args:
        filepath (str): filepath without frame numbers and extension
        startFrame (int): start frame
        endFrame (int): end frame
        width (int): render resolution width
        height (int): render resolution height
        percentSize (float): render resolution multiplier by 100,
            e.g. 100.0 is 1x, 50.0 is 0.5x, 150.0 is 1.5x
        ext (str): image extension

    Returns:
        list: Created filepaths
    """

    # get the screenshot
    percent = percentSize / 100.0
    res_width = width * percent
    res_height = height * percent
    frame_template = "{}.{{:04}}.{}".format(filepath, ext)
    frame_template = frame_template.replace("\\", "/")
    files = []
    user_cancelled = False
    for frame in range(startFrame, endFrame + 1):
        rt.sliderTime = frame
        filepath = frame_template.format(frame)
        preview_res = rt.bitmap(
            res_width, res_height, filename=filepath
        )
        dib = rt.gw.getViewportDib()
        dib_width = float(dib.width)
        dib_height = float(dib.height)
        # aspect ratio
        viewportRatio = dib_width / dib_height
        renderRatio = float(res_width / res_height)
        if viewportRatio < renderRatio:
            heightCrop = (dib_width / renderRatio)
            topEdge = int((dib_height - heightCrop) / 2.0)
            tempImage_bmp = rt.bitmap(dib_width, heightCrop)
            src_box_value = rt.Box2(0, topEdge, dib_width, heightCrop)
            rt.pasteBitmap(dib, tempImage_bmp, src_box_value, rt.Point2(0, 0))
            rt.copy(tempImage_bmp, preview_res)
            rt.close(tempImage_bmp)
        elif viewportRatio > renderRatio:
            widthCrop = dib_height * renderRatio
            leftEdge = int((dib_width - widthCrop) / 2.0)
            tempImage_bmp = rt.bitmap(widthCrop, dib_height)
            src_box_value = rt.Box2(leftEdge, 0, widthCrop, dib_height)
            rt.pasteBitmap(dib, tempImage_bmp, src_box_value, rt.Point2(0, 0))
            rt.copy(tempImage_bmp, preview_res)
            rt.close(tempImage_bmp)
        else:
            rt.copy(dib, preview_res)
        rt.save(preview_res)
        rt.close(preview_res)
        rt.close(dib)
        files.append(filepath)
        if rt.keyboard.escPressed:
            user_cancelled = True
            break
    # clean up the cache
    rt.gc(delayed=True)
    if user_cancelled:
        raise RuntimeError("User cancelled rendering of viewport animation.")
    return files


def render_preview_animation(
        filepath,
        ext,
        camera,
        start_frame=None,
        end_frame=None,
        percentSize=100.0,
        width=1920,
        height=1080,
        viewport_options=None):
    """Render a camera review animation.

    Args:
        filepath (str): filepath to render to, without frame number and
            extension
        ext (str): output file extension
        camera (str): viewport camera for the preview render
        start_frame (int): start frame
        end_frame (int): end frame
        percentSize (float): render resolution multiplier by 100,
            e.g. 100.0 is 1x, 50.0 is 0.5x, 150.0 is 1.5x
        width (int): render resolution width
        height (int): render resolution height
        viewport_options (dict): viewport setting options

    Returns:
        list: Rendered output files
    """
    if start_frame is None:
        start_frame = int(rt.animationRange.start)
    if end_frame is None:
        end_frame = int(rt.animationRange.end)

    if viewport_options is None:
        viewport_options = viewport_options_for_preview_animation()
    with play_preview_when_done(False):
        with viewport_layout_and_camera(camera):
            if int(get_max_version()) < 2024:
                with viewport_preference_setting(
                        viewport_options["general_viewport"],
                        viewport_options["nitrous_manager"],
                        viewport_options["nitrous_viewport"],
                        viewport_options["vp_btn_mgr"]
                ):
                    return _render_preview_animation_max_pre_2024(
                        filepath,
                        start_frame,
                        end_frame,
                        width,
                        height,
                        percentSize,
                        ext
                    )
            else:
                with render_resolution(width, height):
                    return _render_preview_animation_max_2024(
                        filepath,
                        start_frame,
                        end_frame,
                        percentSize,
                        ext,
                        viewport_options
                    )
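

# Usage sketch for `render_preview_animation` above. The output path and
# camera name are made-up values for illustration; the frame range falls
# back to the scene animation range when omitted.
def _example_review_render():
    return render_preview_animation(
        filepath="C:/temp/review/shot010_review",
        ext="png",
        camera="reviewCamera",
        start_frame=1001,
        end_frame=1050,
        percentSize=50.0,  # render at half of the 1920x1080 default
    )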


def viewport_options_for_preview_animation():
    """Get default viewport options for `render_preview_animation`.

    Returns:
        dict: viewport setting options
    """
    # For 2024+ return the flat `CreatePreview` argument dictionary
    # consumed by `_render_preview_animation_max_2024`; for older versions
    # return the nested dictionaries consumed by
    # `viewport_preference_setting`.
    if int(get_max_version()) >= 2024:
        return {
            "vpStyle": "defaultshading",
            "vpPreset": "highquality",
            "vpTexture": False,
            "dspGeometry": True,
            "dspShapes": False,
            "dspLights": False,
            "dspCameras": False,
            "dspHelpers": False,
            "dspParticles": True,
            "dspBones": False,
            "dspBkg": True,
            "dspGrid": False,
            "dspSafeFrame": False,
            "dspFrameNums": False
        }
    else:
        viewport_options = {}
        viewport_options["general_viewport"] = {
            "dspBkg": True,
            "dspGrid": False
        }
        viewport_options["nitrous_manager"] = {
            "AntialiasingQuality": "None"
        }
        viewport_options["nitrous_viewport"] = {
            "VisualStyleMode": "defaultshading",
            "ViewportPreset": "highquality",
            "UseTextureEnabled": False
        }
        viewport_options["vp_btn_mgr"] = {
            "EnableButtons": False}
        return viewport_options
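

# Sketch: start from the defaults above and disable the background for a
# 2024+ preview render. The key name follows the flat `CreatePreview`
# option dictionary; the guard keeps the sketch harmless on the nested
# pre-2024 shape (illustrative only).
def _preview_options_without_background():
    options = viewport_options_for_preview_animation()
    if "dspBkg" in options:
        options["dspBkg"] = False
    return options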
@@ -1,27 +0,0 @@
# -*- coding: utf-8 -*-
"""Pre-launch hook to force the 3ds Max startup script."""
import os
from ayon_max import MAX_HOST_DIR
from ayon_applications import PreLaunchHook, LaunchTypes


class ForceStartupScript(PreLaunchHook):
    """Force 3ds Max to run the AYON startup script.

    Note that this works in combination with the 3ds Max startup script
    that translates the injected environment back to PYTHONPATH for cases
    when 3ds Max drops the PYTHONPATH environment.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
    app_groups = {"3dsmax", "adsk_3dsmax"}
    order = 11
    launch_types = {LaunchTypes.local}

    def execute(self):
        startup_args = [
            "-U",
            "MAXScript",
            os.path.join(MAX_HOST_DIR, "startup", "startup.ms"),
        ]
        self.launch_context.launch_args.append(startup_args)
@@ -1,20 +0,0 @@
# -*- coding: utf-8 -*-
"""Pre-launch hook to inject the python environment."""
import os
from ayon_applications import PreLaunchHook, LaunchTypes


class InjectPythonPath(PreLaunchHook):
    """Inject the AYON environment into 3ds Max.

    Note that this works in combination with the 3ds Max startup script
    that translates MAX_PYTHONPATH back to PYTHONPATH for cases when
    3ds Max drops the PYTHONPATH environment.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
    app_groups = {"3dsmax", "adsk_3dsmax"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        self.launch_context.env["MAX_PYTHONPATH"] = os.environ["PYTHONPATH"]
@@ -1,18 +0,0 @@
from ayon_applications import PreLaunchHook, LaunchTypes


class SetPath(PreLaunchHook):
    """Set current dir to workdir.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
    app_groups = {"max"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        workdir = self.launch_context.env.get("AYON_WORKDIR", "")
        if not workdir:
            self.log.warning("BUG: Workdir is not filled.")
            return

        self.launch_context.kwargs["cwd"] = workdir
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating cameras."""
from ayon_max.api import plugin


class CreateCamera(plugin.MaxCreator):
    """Creator plugin for Camera."""
    identifier = "io.openpype.creators.max.camera"
    label = "Camera"
    product_type = "camera"
    icon = "gear"

    settings_category = "max"
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating a raw max scene."""
from ayon_max.api import plugin


class CreateMaxScene(plugin.MaxCreator):
    """Creator plugin for 3ds Max scenes."""
    identifier = "io.openpype.creators.max.maxScene"
    label = "Max Scene"
    product_type = "maxScene"
    icon = "gear"

    settings_category = "max"
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating models."""
from ayon_max.api import plugin


class CreateModel(plugin.MaxCreator):
    """Creator plugin for Model."""
    identifier = "io.openpype.creators.max.model"
    label = "Model"
    product_type = "model"
    icon = "gear"

    settings_category = "max"
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from ayon_max.api import plugin


class CreatePointCache(plugin.MaxCreator):
    """Creator plugin for Point caches."""
    identifier = "io.openpype.creators.max.pointcache"
    label = "Point Cache"
    product_type = "pointcache"
    icon = "gear"

    settings_category = "max"
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating point clouds."""
from ayon_max.api import plugin


class CreatePointCloud(plugin.MaxCreator):
    """Creator plugin for Point Clouds."""
    identifier = "io.openpype.creators.max.pointcloud"
    label = "Point Cloud"
    product_type = "pointcloud"
    icon = "gear"

    settings_category = "max"
@@ -1,12 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating Redshift proxies."""
from ayon_max.api import plugin


class CreateRedshiftProxy(plugin.MaxCreator):
    identifier = "io.openpype.creators.max.redshiftproxy"
    label = "Redshift Proxy"
    product_type = "redshiftproxy"
    icon = "gear"

    settings_category = "max"
@@ -1,52 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating render instances."""
import os
from ayon_max.api import plugin
from ayon_core.lib import BoolDef
from ayon_max.api.lib_rendersettings import RenderSettings


class CreateRender(plugin.MaxCreator):
    """Creator plugin for Renders."""
    identifier = "io.openpype.creators.max.render"
    label = "Render"
    product_type = "maxrender"
    icon = "gear"

    settings_category = "max"

    def create(self, product_name, instance_data, pre_create_data):
        from pymxs import runtime as rt
        file = rt.maxFileName
        filename, _ = os.path.splitext(file)
        instance_data["AssetName"] = filename
        instance_data["multiCamera"] = pre_create_data.get("multi_cam")
        num_of_renderlayer = rt.batchRenderMgr.numViews
        if num_of_renderlayer > 0:
            rt.batchRenderMgr.DeleteView(num_of_renderlayer)

        instance = super(CreateRender, self).create(
            product_name,
            instance_data,
            pre_create_data)

        container_name = instance.data.get("instance_node")
        # set output paths for rendering (mandatory for Deadline)
        RenderSettings().render_output(container_name)
        # TODO: create multiple camera options
        if self.selected_nodes:
            selected_nodes_name = []
            for sel in self.selected_nodes:
                name = sel.name
                selected_nodes_name.append(name)
            RenderSettings().batch_render_layer(
                container_name, filename,
                selected_nodes_name)

    def get_pre_create_attr_defs(self):
        attrs = super(CreateRender, self).get_pre_create_attr_defs()
        return attrs + [
            BoolDef("multi_cam",
                    label="Multiple Cameras Submission",
                    default=False),
        ]
@@ -1,122 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating reviews in Max."""
from ayon_max.api import plugin
from ayon_core.lib import BoolDef, EnumDef, NumberDef


class CreateReview(plugin.MaxCreator):
    """Review in 3dsMax"""

    identifier = "io.openpype.creators.max.review"
    label = "Review"
    product_type = "review"
    icon = "video-camera"

    settings_category = "max"

    review_width = 1920
    review_height = 1080
    percentSize = 100
    keep_images = False
    image_format = "png"
    visual_style = "Realistic"
    viewport_preset = "Quality"
    vp_texture = True
    anti_aliasing = "None"

    def apply_settings(self, project_settings):
        settings = project_settings["max"]["CreateReview"]  # noqa

        # Take some defaults from settings
        self.review_width = settings.get("review_width", self.review_width)
        self.review_height = settings.get("review_height", self.review_height)
        self.percentSize = settings.get("percentSize", self.percentSize)
        self.keep_images = settings.get("keep_images", self.keep_images)
        self.image_format = settings.get("image_format", self.image_format)
        self.visual_style = settings.get("visual_style", self.visual_style)
        self.viewport_preset = settings.get(
            "viewport_preset", self.viewport_preset)
        self.anti_aliasing = settings.get(
            "anti_aliasing", self.anti_aliasing)
        self.vp_texture = settings.get("vp_texture", self.vp_texture)

    def create(self, product_name, instance_data, pre_create_data):
        # Transfer settings from pre create to instance
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        for key in ["imageFormat",
                    "keepImages",
                    "review_width",
                    "review_height",
                    "percentSize",
                    "visualStyleMode",
                    "viewportPreset",
                    "antialiasingQuality",
                    "vpTexture"]:
            if key in pre_create_data:
                creator_attributes[key] = pre_create_data[key]

        super(CreateReview, self).create(
            product_name,
            instance_data,
            pre_create_data)

    def get_instance_attr_defs(self):
        image_format_enum = ["exr", "jpg", "png", "tga"]

        visual_style_preset_enum = [
            "Realistic", "Shaded", "Facets",
            "ConsistentColors", "HiddenLine",
            "Wireframe", "BoundingBox", "Ink",
            "ColorInk", "Acrylic", "Tech", "Graphite",
            "ColorPencil", "Pastel", "Clay", "ModelAssist"
        ]
        preview_preset_enum = [
            "Quality", "Standard", "Performance",
            "DXMode", "Customize"]
        anti_aliasing_enum = ["None", "2X", "4X", "8X"]

        return [
            NumberDef("review_width",
                      label="Review width",
                      decimals=0,
                      minimum=0,
                      default=self.review_width),
            NumberDef("review_height",
                      label="Review height",
                      decimals=0,
                      minimum=0,
                      default=self.review_height),
            NumberDef("percentSize",
                      label="Percent of Output",
                      default=self.percentSize,
                      minimum=1,
                      decimals=0),
            BoolDef("keepImages",
                    label="Keep Image Sequences",
                    default=self.keep_images),
            EnumDef("imageFormat",
                    image_format_enum,
                    default=self.image_format,
                    label="Image Format Options"),
            EnumDef("visualStyleMode",
                    visual_style_preset_enum,
                    default=self.visual_style,
                    label="Preference"),
            EnumDef("viewportPreset",
                    preview_preset_enum,
                    default=self.viewport_preset,
                    label="Preview Preset"),
            EnumDef("antialiasingQuality",
                    anti_aliasing_enum,
                    default=self.anti_aliasing,
                    label="Anti-aliasing Quality"),
            BoolDef("vpTexture",
                    label="Viewport Texture",
                    default=self.vp_texture)
        ]

    def get_pre_create_attr_defs(self):
        # Use the same attributes as for instance attributes
        attrs = super().get_pre_create_attr_defs()
        return attrs + self.get_instance_attr_defs()
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating TyCache."""
from ayon_max.api import plugin


class CreateTyCache(plugin.MaxCreator):
    """Creator plugin for TyCache."""
    identifier = "io.openpype.creators.max.tycache"
    label = "TyCache"
    product_type = "tycache"
    icon = "gear"

    settings_category = "max"
@@ -1,119 +0,0 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating workfiles."""
import ayon_api

from ayon_core.pipeline import CreatedInstance, AutoCreator
from ayon_max.api import plugin
from ayon_max.api.lib import read, imprint
from pymxs import runtime as rt


class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator):
    """Workfile auto-creator."""
    identifier = "io.ayon.creators.max.workfile"
    label = "Workfile"
    product_type = "workfile"
    icon = "fa5.file"

    default_variant = "Main"

    settings_category = "max"

    def create(self):
        variant = self.default_variant
        current_instance = next(
            (
                instance for instance in self.create_context.instances
                if instance.creator_identifier == self.identifier
            ), None)
        project_name = self.project_name
        folder_path = self.create_context.get_current_folder_path()
        task_name = self.create_context.get_current_task_name()
        host_name = self.create_context.host_name

        if current_instance is None:
            folder_entity = ayon_api.get_folder_by_path(
                project_name, folder_path
            )
            task_entity = ayon_api.get_task_by_name(
                project_name, folder_entity["id"], task_name
            )
            product_name = self.get_product_name(
                project_name,
                folder_entity,
                task_entity,
                variant,
                host_name,
            )
            data = {
                "folderPath": folder_path,
                "task": task_name,
                "variant": variant
            }

            data.update(
                self.get_dynamic_data(
                    project_name,
                    folder_entity,
                    task_entity,
                    variant,
                    host_name,
                    current_instance)
            )
            self.log.info("Auto-creating workfile instance...")
            instance_node = self.create_node(product_name)
            data["instance_node"] = instance_node.name
            current_instance = CreatedInstance(
                self.product_type, product_name, data, self
            )
            self._add_instance_to_context(current_instance)
            imprint(instance_node.name, current_instance.data)
        elif (
            current_instance["folderPath"] != folder_path
            or current_instance["task"] != task_name
        ):
            # Update the instance context if it is not the same
            folder_entity = ayon_api.get_folder_by_path(
                project_name, folder_path
            )
            task_entity = ayon_api.get_task_by_name(
                project_name, folder_entity["id"], task_name
            )
            product_name = self.get_product_name(
                project_name,
                folder_entity,
                task_entity,
                variant,
                host_name,
            )

            current_instance["folderPath"] = folder_entity["path"]
            current_instance["task"] = task_name
            current_instance["productName"] = product_name

    def collect_instances(self):
        self.cache_instance_data(self.collection_shared_data)
        cached_instances = self.collection_shared_data["max_cached_instances"]
        for instance in cached_instances.get(self.identifier, []):
            if not rt.getNodeByName(instance):
                continue
            created_instance = CreatedInstance.from_existing(
                read(rt.GetNodeByName(instance)), self
            )
            self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        for created_inst, _ in update_list:
            instance_node = created_inst.get("instance_node")
            imprint(
                instance_node,
                created_inst.data_to_store()
            )

    def create_node(self, product_name):
        if rt.getNodeByName(product_name):
            node = rt.getNodeByName(product_name)
            return node
        node = rt.Container(name=product_name)
        node.isHidden = True
        return node
@@ -1,101 +0,0 @@
import os

from ayon_max.api import lib
from ayon_max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set
)
from ayon_max.api.pipeline import (
    containerise,
    get_previous_loaded_object,
    update_custom_attribute_data,
    remove_container_data
)
from ayon_core.pipeline import get_representation_path, load


class FbxLoader(load.LoaderPlugin):
    """Fbx Loader."""

    product_types = {"camera"}
    representations = {"fbx"}
    order = -9
    icon = "code-fork"
    color = "white"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt
        filepath = self.filepath_from_context(context)
        filepath = os.path.normpath(filepath)
        rt.FBXImporterSetParam("Animation", True)
        rt.FBXImporterSetParam("Camera", True)
        rt.FBXImporterSetParam("AxisConversionMethod", True)
        rt.FBXImporterSetParam("Mode", rt.Name("create"))
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.ImportFile(
            filepath,
            rt.name("noPrompt"),
            using=rt.FBXIMP)

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        selections = rt.GetCurrentSelection()

        for selection in selections:
            selection.name = f"{namespace}:{selection.name}"

        return containerise(
            name, selections, context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, context):
        from pymxs import runtime as rt

        repre_entity = context["representation"]
        path = get_representation_path(repre_entity)
        node_name = container["instance_node"]
        node = rt.getNodeByName(node_name)
        namespace, _ = get_namespace(node_name)

        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        prev_fbx_objects = rt.GetCurrentSelection()
        transform_data = object_transform_set(prev_fbx_objects)
        for prev_fbx_obj in prev_fbx_objects:
            if rt.isValidNode(prev_fbx_obj):
                rt.Delete(prev_fbx_obj)

        rt.FBXImporterSetParam("Animation", True)
        rt.FBXImporterSetParam("Camera", True)
        rt.FBXImporterSetParam("Mode", rt.Name("merge"))
        rt.FBXImporterSetParam("AxisConversionMethod", True)
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.ImportFile(
            path, rt.name("noPrompt"), using=rt.FBXIMP)
        current_fbx_objects = rt.GetCurrentSelection()
        fbx_objects = []
        for fbx_object in current_fbx_objects:
            fbx_object.name = f"{namespace}:{fbx_object.name}"
            fbx_objects.append(fbx_object)
            fbx_transform = f"{fbx_object.name}.transform"
            if fbx_transform in transform_data.keys():
                fbx_object.pos = transform_data[fbx_transform] or 0
                fbx_object.scale = transform_data[
                    f"{fbx_object.name}.scale"] or 0

        update_custom_attribute_data(node, fbx_objects)
        lib.imprint(container["instance_node"], {
            "representation": repre_entity["id"]
        })

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        from pymxs import runtime as rt

        node = rt.GetNodeByName(container["instance_node"])
        remove_container_data(node)
@@ -1,178 +0,0 @@
import os
from qtpy import QtWidgets, QtCore
from ayon_core.lib.attribute_definitions import EnumDef
from ayon_max.api import lib
from ayon_max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set,
    is_headless
)
from ayon_max.api.pipeline import (
    containerise, get_previous_loaded_object,
    update_custom_attribute_data,
    remove_container_data
)
from ayon_core.pipeline import get_representation_path, load


class MaterialDupOptionsWindow(QtWidgets.QDialog):
    """Pop-up dialog that lets the user choose material duplicate
    options for importing Max objects when updating or switching assets.
    """
    def __init__(self, material_options):
        super(MaterialDupOptionsWindow, self).__init__()
        self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint)

        self.material_option = None
        self.material_options = material_options

        self.widgets = {
            "label": QtWidgets.QLabel(
                "Select material duplicate options before loading the max scene."),
            "material_options_list": QtWidgets.QListWidget(),
            "warning": QtWidgets.QLabel("No material options selected!"),
            "buttons": QtWidgets.QWidget(),
            "okButton": QtWidgets.QPushButton("Ok"),
            "cancelButton": QtWidgets.QPushButton("Cancel")
        }
        for key, value in material_options.items():
            item = QtWidgets.QListWidgetItem(value)
            self.widgets["material_options_list"].addItem(item)
            item.setData(QtCore.Qt.UserRole, key)
        # Build buttons.
        layout = QtWidgets.QHBoxLayout(self.widgets["buttons"])
        layout.addWidget(self.widgets["okButton"])
        layout.addWidget(self.widgets["cancelButton"])
        # Build layout.
        layout = QtWidgets.QVBoxLayout(self)
        layout.addWidget(self.widgets["label"])
        layout.addWidget(self.widgets["material_options_list"])
        # warning is hidden until OK is pressed without a selection
        layout.addWidget(self.widgets["warning"])
        layout.addWidget(self.widgets["buttons"])
        self.widgets["warning"].setVisible(False)

        self.widgets["okButton"].pressed.connect(self.on_ok_pressed)
        self.widgets["cancelButton"].pressed.connect(self.on_cancel_pressed)
        self.widgets["material_options_list"].itemPressed.connect(
            self.on_material_options_pressed)

    def on_material_options_pressed(self, item):
        self.material_option = item.data(QtCore.Qt.UserRole)

    def on_ok_pressed(self):
        if self.material_option is None:
            self.widgets["warning"].setVisible(True)
            return
        self.close()

    def on_cancel_pressed(self):
        self.material_option = "promptMtlDups"
        self.close()


class MaxSceneLoader(load.LoaderPlugin):
    """Max Scene Loader."""

    product_types = {
        "camera",
        "maxScene",
        "model",
    }

    representations = {"max"}
    order = -8
    icon = "code-fork"
    color = "green"
    mtl_dup_default = "promptMtlDups"
    mtl_dup_enum_dict = {
        "promptMtlDups": "Prompt on Duplicate Materials",
        "useMergedMtlDups": "Use Incoming Material",
        "useSceneMtlDups": "Use Scene Material",
        "renameMtlDups": "Merge and Rename Incoming Material"
    }

    @classmethod
    def get_options(cls, contexts):
        return [
            EnumDef("mtldup",
                    items=cls.mtl_dup_enum_dict,
                    default=cls.mtl_dup_default,
                    label="Material Duplicate Options")
        ]

    def load(self, context, name=None, namespace=None, options=None):
        from pymxs import runtime as rt
        mat_dup_options = (options or {}).get("mtldup", self.mtl_dup_default)
        path = self.filepath_from_context(context)
        path = os.path.normpath(path)
        # import the max scene by using "merge file"
        path = path.replace('\\', '/')
        rt.MergeMaxFile(path, rt.Name(mat_dup_options),
                        quiet=True, includeFullGroup=True)
        max_objects = rt.getLastMergedNodes()
        max_object_names = [obj.name for obj in max_objects]
        # implement the OP/AYON custom attributes before load
        max_container = []
        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        for max_obj, obj_name in zip(max_objects, max_object_names):
            max_obj.name = f"{namespace}:{obj_name}"
            max_container.append(max_obj)
        return containerise(
            name, max_container, context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, context):
        from pymxs import runtime as rt

        repre_entity = context["representation"]
        path = get_representation_path(repre_entity)
        node_name = container["instance_node"]
        node = rt.getNodeByName(node_name)
        namespace, _ = get_namespace(node_name)
        # delete the old container with its attribute and the old
        # duplicates; use the modifier OP data to find the data to delete
        node_list = get_previous_loaded_object(node)
        rt.select(node_list)
        prev_max_objects = rt.GetCurrentSelection()
        transform_data = object_transform_set(prev_max_objects)

        for prev_max_obj in prev_max_objects:
            if rt.isValidNode(prev_max_obj):  # noqa
                rt.Delete(prev_max_obj)
        material_option = self.mtl_dup_default
        if not is_headless():
            window = MaterialDupOptionsWindow(self.mtl_dup_enum_dict)
            window.exec_()
            material_option = window.material_option
        rt.MergeMaxFile(path, rt.Name(material_option), quiet=True)

        current_max_objects = rt.getLastMergedNodes()

        current_max_object_names = [obj.name for obj
                                    in current_max_objects]

        max_objects = []
        for max_obj, obj_name in zip(current_max_objects,
                                     current_max_object_names):
            max_obj.name = f"{namespace}:{obj_name}"
            max_objects.append(max_obj)
            max_transform = f"{max_obj.name}.transform"
            if max_transform in transform_data.keys():
                max_obj.pos = transform_data[max_transform] or 0
                max_obj.scale = transform_data[
                    f"{max_obj.name}.scale"] or 0

        update_custom_attribute_data(node, max_objects)
        lib.imprint(container["instance_node"], {
            "representation": repre_entity["id"]
        })

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        from pymxs import runtime as rt
        node = rt.GetNodeByName(container["instance_node"])
        remove_container_data(node)
@@ -1,123 +0,0 @@
import os
from ayon_core.pipeline import load, get_representation_path
from ayon_max.api.pipeline import (
    containerise,
    get_previous_loaded_object,
    remove_container_data
)
from ayon_max.api import lib
from ayon_max.api.lib import (
    maintained_selection, unique_namespace
)


class ModelAbcLoader(load.LoaderPlugin):
    """Loading model with the Alembic loader."""

    product_types = {"model"}
    label = "Load Model with Alembic"
    representations = {"abc"}
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt

        file_path = os.path.normpath(self.filepath_from_context(context))

        abc_before = {
            c
            for c in rt.rootNode.Children
            if rt.classOf(c) == rt.AlembicContainer
        }

        rt.AlembicImport.ImportToRoot = False
        rt.AlembicImport.CustomAttributes = True
        rt.AlembicImport.UVs = True
        rt.AlembicImport.VertexColors = True
        rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport)

        abc_after = {
            c
            for c in rt.rootNode.Children
            if rt.classOf(c) == rt.AlembicContainer
        }

        # This should yield the new AlembicContainer node
        abc_containers = abc_after.difference(abc_before)

        if len(abc_containers) != 1:
            self.log.error("Something failed when loading.")

        abc_container = abc_containers.pop()

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        abc_objects = []
        for abc_object in abc_container.Children:
            abc_object.name = f"{namespace}:{abc_object.name}"
            abc_objects.append(abc_object)
        # rename the abc container with the namespace
        abc_container_name = f"{namespace}:{name}"
        abc_container.name = abc_container_name
        abc_objects.append(abc_container)

        return containerise(
            name, abc_objects, context,
            namespace, loader=self.__class__.__name__
        )

    def update(self, container, context):
        from pymxs import runtime as rt

        repre_entity = context["representation"]
        path = get_representation_path(repre_entity)
        node = rt.GetNodeByName(container["instance_node"])
        node_list = [n for n in get_previous_loaded_object(node)
                     if rt.ClassOf(n) == rt.AlembicContainer]
        with maintained_selection():
            rt.Select(node_list)

            for alembic in rt.Selection:
                abc = rt.GetNodeByName(alembic.name)
                rt.Select(abc.Children)
                for abc_con in abc.Children:
                    abc_con.source = path
                    rt.Select(abc_con.Children)
                    for abc_obj in abc_con.Children:
                        abc_obj.source = path
        lib.imprint(
            container["instance_node"],
            {"representation": repre_entity["id"]},
        )

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        from pymxs import runtime as rt
        node = rt.GetNodeByName(container["instance_node"])
        remove_container_data(node)

    @staticmethod
    def get_container_children(parent, type_name):
        from pymxs import runtime as rt

        def list_children(node):
            children = []
            for c in node.Children:
                children.append(c)
                children += list_children(c)
            return children

        filtered = []
        for child in list_children(parent):
            class_type = str(rt.ClassOf(child.baseObject))
            if class_type == type_name:
                filtered.append(child)

        return filtered
|
|
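rt.importFile() returns no handle to the nodes it creates, so ModelAbcLoader.load() snapshots the scene-root children before and after the import and takes the set difference. The pattern generalises to any node class; a minimal sketch, with import_and_capture and import_action as hypothetical names:

from pymxs import runtime as rt


def import_and_capture(import_action, node_class):
    """Run an import callable and return only the root nodes it created."""
    def snapshot():
        return {
            c for c in rt.rootNode.Children
            if rt.classOf(c) == node_class
        }

    before = snapshot()
    import_action()
    # the set difference is exactly the nodes the import added
    return snapshot().difference(before)

The Alembic import above would then read import_and_capture(lambda: rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport), rt.AlembicContainer).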
@@ -1,98 +0,0 @@
import os
from ayon_core.pipeline import load, get_representation_path
from ayon_max.api.pipeline import (
    containerise, get_previous_loaded_object,
    update_custom_attribute_data,
    remove_container_data
)
from ayon_max.api import lib
from ayon_max.api.lib import (
    unique_namespace,
    get_namespace,
    object_transform_set
)
from ayon_max.api.lib import maintained_selection


class FbxModelLoader(load.LoaderPlugin):
    """Fbx Model Loader."""

    product_types = {"model"}
    representations = {"fbx"}
    order = -9
    icon = "code-fork"
    color = "white"

    def load(self, context, name=None, namespace=None, data=None):
        from pymxs import runtime as rt

        filepath = self.filepath_from_context(context)
        filepath = os.path.normpath(filepath)
        rt.FBXImporterSetParam("Animation", False)
        rt.FBXImporterSetParam("Cameras", False)
        rt.FBXImporterSetParam("Mode", rt.Name("create"))
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.importFile(
            filepath, rt.name("noPrompt"), using=rt.FBXIMP)

        namespace = unique_namespace(
            name + "_",
            suffix="_",
        )
        selections = rt.GetCurrentSelection()

        for selection in selections:
            selection.name = f"{namespace}:{selection.name}"

        return containerise(
            name, selections, context,
            namespace, loader=self.__class__.__name__)

    def update(self, container, context):
        from pymxs import runtime as rt

        repre_entity = context["representation"]
        path = get_representation_path(repre_entity)
        node_name = container["instance_node"]
        node = rt.getNodeByName(node_name)
        if not node:
            # recreate the container node if it went missing
            node = rt.Container(name=node_name)
        namespace, _ = get_namespace(node_name)

        node_list = get_previous_loaded_object(node)
        rt.Select(node_list)
        prev_fbx_objects = rt.GetCurrentSelection()
        transform_data = object_transform_set(prev_fbx_objects)
        for prev_fbx_obj in prev_fbx_objects:
            if rt.isValidNode(prev_fbx_obj):
                rt.Delete(prev_fbx_obj)

        rt.FBXImporterSetParam("Animation", False)
        rt.FBXImporterSetParam("Cameras", False)
        rt.FBXImporterSetParam("Mode", rt.Name("create"))
        rt.FBXImporterSetParam("Preserveinstances", True)
        rt.importFile(path, rt.name("noPrompt"), using=rt.FBXIMP)
        current_fbx_objects = rt.GetCurrentSelection()
        fbx_objects = []
        for fbx_object in current_fbx_objects:
            fbx_object.name = f"{namespace}:{fbx_object.name}"
            fbx_objects.append(fbx_object)
            fbx_transform = f"{fbx_object}.transform"
            if fbx_transform in transform_data.keys():
                fbx_object.pos = transform_data[fbx_transform] or 0
                fbx_object.scale = transform_data[
                    f"{fbx_object}.scale"] or 0

        with maintained_selection():
            rt.Select(node)
        update_custom_attribute_data(node, fbx_objects)
        lib.imprint(container["instance_node"], {
            "representation": repre_entity["id"]
        })

    def switch(self, container, context):
        self.update(container, context)

    def remove(self, container):
        from pymxs import runtime as rt

        node = rt.GetNodeByName(container["instance_node"])
        remove_container_data(node)
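rt.FBXImporterSetParam() mutates process-global importer state, which is why load() and update() both re-apply the full parameter set before each rt.importFile() call. A minimal sketch of factoring out that repetition (the helper name import_fbx_static is hypothetical, not part of ayon_max):

from pymxs import runtime as rt


def import_fbx_static(filepath):
    """Import an FBX as static geometry and return the imported nodes.

    FBX importer parameters persist across calls in 3ds Max, so they
    are reset on every import rather than assumed from a previous one.
    """
    rt.FBXImporterSetParam("Animation", False)
    rt.FBXImporterSetParam("Cameras", False)
    rt.FBXImporterSetParam("Mode", rt.Name("create"))
    rt.FBXImporterSetParam("Preserveinstances", True)
    rt.importFile(filepath, rt.name("noPrompt"), using=rt.FBXIMP)
    # importFile leaves the freshly created nodes selected
    return rt.GetCurrentSelection()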
Some files were not shown because too many files have changed in this diff.