Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00

Commit aeef95f250: Merge branch 'develop' into enhancement/abstracting_colorspace_publishing_functions

76 changed files with 1595 additions and 538 deletions
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 4 changes)
@@ -35,6 +35,8 @@ body:
       label: Version
       description: What version are you running? Look to OpenPype Tray
       options:
+        - 3.16.5-nightly.4
+        - 3.16.5-nightly.3
         - 3.16.5-nightly.2
         - 3.16.5-nightly.1
         - 3.16.4
@@ -133,8 +135,6 @@ body:
         - 3.14.9-nightly.1
         - 3.14.8
         - 3.14.8-nightly.4
         - 3.14.8-nightly.3
         - 3.14.8-nightly.2
     validations:
       required: true
   - type: dropdown
@@ -62,7 +62,7 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v
 
 #### Clone repository:
 ```sh
-git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git
+git clone --recurse-submodules git@github.com:ynput/OpenPype.git
 ```
 
 #### To build OpenPype:
@@ -144,6 +144,10 @@ sudo ./tools/docker_build.sh centos7
 
 If all is successful, you'll find built OpenPype in `./build/` folder.
 
+Docker build can be also started from Windows machine, just use `./tools/docker_build.ps1` instead of shell script.
+
+This could be used even for building linux build (with argument `centos7` or `debian`)
+
 #### Manual build
 You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled.
@@ -45,6 +45,9 @@ class OCIOEnvHook(PreLaunchHook):
         if config_data:
             ocio_path = config_data["path"]
 
+            if self.host_name in ["nuke", "hiero"]:
+                ocio_path = ocio_path.replace("\\", "/")
+
             self.log.info(
                 f"Setting OCIO environment to config path: {ocio_path}")
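Note on the hunk above: the hook now flips Windows-style backslashes before exporting the OCIO path, presumably because Nuke and Hiero expect forward slashes. A self-contained sketch of that normalization; `host_name` and `config_data` here are plain stand-ins for the hook's attributes, not the real PreLaunchHook API:

```python
# Standalone sketch of the normalization added above.
def normalize_ocio_path(host_name, config_data):
    ocio_path = config_data["path"]
    if host_name in ["nuke", "hiero"]:
        # Nuke/Hiero want forward slashes, even on Windows
        ocio_path = ocio_path.replace("\\", "/")
    return ocio_path


print(normalize_ocio_path("nuke", {"path": "C:\\configs\\aces.ocio"}))
# C:/configs/aces.ocio
```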
@@ -138,7 +138,6 @@ class CollectAERender(publish.AbstractCollectRender):
             fam = "render.farm"
             if fam not in instance.families:
                 instance.families.append(fam)
-            instance.toBeRenderedOn = "deadline"
             instance.renderer = "aerender"
             instance.farm = True  # to skip integrate
             if "review" in instance.families:
@@ -108,7 +108,6 @@ class CollectFusionRender(
             fam = "render.farm"
             if fam not in instance.families:
                 instance.families.append(fam)
-            instance.toBeRenderedOn = "deadline"
             instance.farm = True  # to skip integrate
             if "review" in instance.families:
                 # to skip ExtractReview locally
@@ -147,13 +147,13 @@ class CollectFarmRender(publish.AbstractCollectRender):
                 attachTo=False,
                 setMembers=[node],
                 publish=info[4],
                 review=False,
                 renderer=None,
                 priority=50,
                 name=node.split("/")[1],
 
                 family="render.farm",
                 families=["render.farm"],
                 farm=True,
 
                 resolutionWidth=context.data["resolutionWidth"],
                 resolutionHeight=context.data["resolutionHeight"],
@@ -174,7 +174,6 @@ class CollectFarmRender(publish.AbstractCollectRender):
                 outputFormat=info[1],
                 outputStartFrame=info[3],
                 leadingZeros=info[2],
-                toBeRenderedOn='deadline',
                 ignoreFrameHandleCheck=True
 
             )
@@ -57,28 +57,31 @@ def create_interactive(creator_identifier, **kwargs):
         list: The created instances.
 
     """
 
-    # TODO Use Qt instead
-    result, variant = hou.ui.readInput('Define variant name',
-                                       buttons=("Ok", "Cancel"),
-                                       initial_contents='Main',
-                                       title="Define variant",
-                                       help="Set the variant for the "
-                                            "publish instance",
-                                       close_choice=1)
-    if result == 1:
-        # User interrupted
-        return
-    variant = variant.strip()
-    if not variant:
-        raise RuntimeError("Empty variant value entered.")
-
     host = registered_host()
     context = CreateContext(host)
     creator = context.manual_creators.get(creator_identifier)
     if not creator:
-        raise RuntimeError("Invalid creator identifier: "
-                           "{}".format(creator_identifier))
+        raise RuntimeError("Invalid creator identifier: {}".format(
+            creator_identifier)
+        )
+
+    # TODO Use Qt instead
+    result, variant = hou.ui.readInput(
+        "Define variant name",
+        buttons=("Ok", "Cancel"),
+        initial_contents=creator.get_default_variant(),
+        title="Define variant",
+        help="Set the variant for the publish instance",
+        close_choice=1
+    )
+
+    if result == 1:
+        # User interrupted
+        return
+
+    variant = variant.strip()
+    if not variant:
+        raise RuntimeError("Empty variant value entered.")
 
     # TODO: Once more elaborate unique create behavior should exist per Creator
     # instead of per network editor area then we should move this from here
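The rewrite above defers the variant prompt until the creator is resolved, so the dialog can seed `initial_contents` with `creator.get_default_variant()` instead of a hardcoded "Main". A hedged sketch of the `hou.ui.readInput` contract the code relies on; it only runs inside a Houdini session where `hou` is importable:

```python
import hou

# readInput returns (button_index, text); close_choice=1 makes closing the
# dialog behave like pressing the second button ("Cancel").
result, variant = hou.ui.readInput(
    "Define variant name",
    buttons=("Ok", "Cancel"),
    initial_contents="Main",  # the new code passes creator.get_default_variant()
    title="Define variant",
    help="Set the variant for the publish instance",
    close_choice=1
)
if result == 1:
    print("cancelled")
else:
    print("variant:", variant.strip())
```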
@@ -22,9 +22,12 @@ log = logging.getLogger(__name__)
 JSON_PREFIX = "JSON:::"
 
 
-def get_asset_fps():
+def get_asset_fps(asset_doc=None):
     """Return current asset fps."""
-    return get_current_project_asset()["data"].get("fps")
+
+    if asset_doc is None:
+        asset_doc = get_current_project_asset(fields=["data.fps"])
+    return asset_doc["data"]["fps"]
 
 
 def set_id(node, unique_id, overwrite=False):
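The optional `asset_doc` argument lets callers that already hold the asset document (like `reset_framerange` below) skip a second database query. A toy, self-contained illustration of why that matters; the stub stands in for the real `get_current_project_asset`:

```python
def get_current_project_asset(fields=None):
    print("querying database...")  # stub: pretend this is a DB round-trip
    return {"data": {"fps": 25.0}}


def get_asset_fps(asset_doc=None):
    if asset_doc is None:
        asset_doc = get_current_project_asset(fields=["data.fps"])
    return asset_doc["data"]["fps"]


get_asset_fps()                          # triggers the query
get_asset_fps({"data": {"fps": 24.0}})   # reuses the document, no query
```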
@@ -472,14 +475,19 @@ def maintained_selection():
 
 
 def reset_framerange():
-    """Set frame range to current asset"""
+    """Set frame range and FPS to current asset"""
 
     # Get asset data
     project_name = get_current_project_name()
     asset_name = get_current_asset_name()
     # Get the asset ID from the database for the asset of current context
     asset_doc = get_asset_by_name(project_name, asset_name)
     asset_data = asset_doc["data"]
 
+    # Get FPS
+    fps = get_asset_fps(asset_doc)
+
     # Get Start and End Frames
     frame_start = asset_data.get("frameStart")
     frame_end = asset_data.get("frameEnd")
@@ -493,6 +501,9 @@ def reset_framerange():
     frame_start -= int(handle_start)
     frame_end += int(handle_end)
 
+    # Set frame range and FPS
+    print("Setting scene FPS to {}".format(int(fps)))
+    set_scene_fps(fps)
     hou.playbar.setFrameRange(frame_start, frame_end)
     hou.playbar.setPlaybackRange(frame_start, frame_end)
     hou.setFrame(frame_start)
@@ -25,7 +25,6 @@ from openpype.lib import (
     emit_event,
 )
 
-from .lib import get_asset_fps
 
 log = logging.getLogger("openpype.hosts.houdini")
 
@@ -385,11 +384,6 @@ def _set_context_settings():
         None
     """
 
-    # Set new scene fps
-    fps = get_asset_fps()
-    print("Setting scene FPS to %i" % fps)
-    lib.set_scene_fps(fps)
-
     lib.reset_framerange()
@@ -33,7 +33,7 @@ class CreateVDBCache(plugin.HoudiniCreator):
         }
 
         if self.selected_nodes:
-            parms["soppath"] = self.selected_nodes[0].path()
+            parms["soppath"] = self.get_sop_node_path(self.selected_nodes[0])
 
         instance_node.setParms(parms)
@@ -42,3 +42,63 @@ class CreateVDBCache(plugin.HoudiniCreator):
             hou.ropNodeTypeCategory(),
             hou.sopNodeTypeCategory()
         ]
+
+    def get_sop_node_path(self, selected_node):
+        """Get Sop Path of the selected node.
+
+        Although Houdini allows ObjNode path on `sop_path` for the
+        the ROP node, we prefer it set to the SopNode path explicitly.
+        """
+
+        # Allow sop level paths (e.g. /obj/geo1/box1)
+        if isinstance(selected_node, hou.SopNode):
+            self.log.debug(
+                "Valid SopNode selection, 'SOP Path' in ROP will"
+                " be set to '%s'.", selected_node.path()
+            )
+            return selected_node.path()
+
+        # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
+        # but do not allow other object level nodes types like cameras, etc.
+        elif isinstance(selected_node, hou.ObjNode) and \
+                selected_node.type().name() == "geo":
+
+            # Try to find output node.
+            sop_node = self.get_obj_output(selected_node)
+            if sop_node:
+                self.log.debug(
+                    "Valid ObjNode selection, 'SOP Path' in ROP will "
+                    "be set to the child path '%s'.", sop_node.path()
+                )
+                return sop_node.path()
+
+        self.log.debug(
+            "Selection isn't valid. 'SOP Path' in ROP will be empty."
+        )
+        return ""
+
+    def get_obj_output(self, obj_node):
+        """Try to find output node.
+
+        If any output nodes are present, return the output node with
+        the minimum 'outputidx'
+        If no output nodes are present, return the node with display flag
+        If no nodes are present at all, return None
+        """
+
+        outputs = obj_node.subnetOutputs()
+
+        # if obj_node is empty
+        if not outputs:
+            return
+
+        # if obj_node has one output child whether its
+        # sop output node or a node with the render flag
+        elif len(outputs) == 1:
+            return outputs[0]
+
+        # if there are more than one, then it has multiple output nodes
+        # return the one with the minimum 'outputidx'
+        else:
+            return min(outputs,
+                       key=lambda node: node.evalParm('outputidx'))
@@ -59,6 +59,9 @@ class HdaLoader(load.LoaderPlugin):
         def_paths = [d.libraryFilePath() for d in defs]
         new = def_paths.index(file_path)
         defs[new].setIsPreferred(True)
+        hda_node.setParms({
+            "representation": str(representation["_id"])
+        })
 
     def remove(self, container):
         node = container["node"]
@@ -2,7 +2,19 @@
 <mainMenu>
   <menuBar>
     <subMenu id="openpype_menu">
-      <label>OpenPype</label>
+      <labelExpression><![CDATA[
+import os
+return os.environ.get("AVALON_LABEL") or "OpenPype"
+]]></labelExpression>
+      <actionItem id="asset_name">
+        <labelExpression><![CDATA[
+from openpype.pipeline import get_current_asset_name, get_current_task_name
+label = "{}, {}".format(get_current_asset_name(), get_current_task_name())
+return label
+]]></labelExpression>
+      </actionItem>
+
+      <separatorItem/>
+
       <scriptItem id="openpype_create">
         <label>Create...</label>
@@ -6,7 +6,7 @@ from typing import Any, Dict, Union
 
 import six
 from openpype.pipeline.context_tools import (
-    get_current_project, get_current_project_asset,)
+    get_current_project, get_current_project_asset)
 from pymxs import runtime as rt
 
 JSON_PREFIX = "JSON::"
@@ -312,3 +312,98 @@ def set_timeline(frameStart, frameEnd):
     """
     rt.animationRange = rt.interval(frameStart, frameEnd)
     return rt.animationRange
+
+
+def unique_namespace(namespace, format="%02d",
+                     prefix="", suffix="", con_suffix="CON"):
+    """Return unique namespace
+
+    Arguments:
+        namespace (str): Name of namespace to consider
+        format (str, optional): Formatting of the given iteration number
+        suffix (str, optional): Only consider namespaces with this suffix.
+        con_suffix: max only, for finding the name of the master container
+
+    >>> unique_namespace("bar")
+    # bar01
+    >>> unique_namespace(":hello")
+    # :hello01
+    >>> unique_namespace("bar:", suffix="_NS")
+    # bar01_NS:
+
+    """
+
+    def current_namespace():
+        current = namespace
+        # When inside a namespace Max adds no trailing :
+        if not current.endswith(":"):
+            current += ":"
+        return current
+
+    # Always check against the absolute namespace root
+    # There's no clash with :x if we're defining namespace :a:x
+    ROOT = ":" if namespace.startswith(":") else current_namespace()
+
+    # Strip trailing `:` tokens since we might want to add a suffix
+    start = ":" if namespace.startswith(":") else ""
+    end = ":" if namespace.endswith(":") else ""
+    namespace = namespace.strip(":")
+    if ":" in namespace:
+        # Split off any nesting that we don't uniqify anyway.
+        parents, namespace = namespace.rsplit(":", 1)
+        start += parents + ":"
+        ROOT += start
+
+    iteration = 1
+    increment_version = True
+    while increment_version:
+        nr_namespace = namespace + format % iteration
+        unique = prefix + nr_namespace + suffix
+        container_name = f"{unique}:{namespace}{con_suffix}"
+        if not rt.getNodeByName(container_name):
+            name_space = start + unique + end
+            increment_version = False
+            return name_space
+        else:
+            increment_version = True
+            iteration += 1
+
+
+def get_namespace(container_name):
+    """Get the namespace and name of the sub-container
+
+    Args:
+        container_name (str): the name of master container
+
+    Raises:
+        RuntimeError: when there is no master container found
+
+    Returns:
+        namespace (str): namespace of the sub-container
+        name (str): name of the sub-container
+    """
+    node = rt.getNodeByName(container_name)
+    if not node:
+        raise RuntimeError("Master Container Not Found..")
+    name = rt.getUserProp(node, "name")
+    namespace = rt.getUserProp(node, "namespace")
+    return namespace, name
+
+
+def object_transform_set(container_children):
+    """A function which allows to store the transform of
+    previous loaded object(s)
+    Args:
+        container_children(list): A list of nodes
+
+    Returns:
+        transform_set (dict): A dict with all transform data of
+        the previous loaded object(s)
+    """
+    transform_set = {}
+    for node in container_children:
+        name = f"{node.name}.transform"
+        transform_set[name] = node.pos
+        name = f"{node.name}.scale"
+        transform_set[name] = node.scale
+    return transform_set
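`object_transform_set` keys positions under `<name>.transform` and scales under `<name>.scale`, which is why the loaders further down rename nodes into the namespace before looking transforms back up. A toy round-trip with a plain class standing in for pymxs nodes:

```python
class Node:
    """Stand-in for a pymxs node with name/pos/scale properties."""
    def __init__(self, name, pos, scale):
        self.name, self.pos, self.scale = name, pos, scale


def object_transform_set(container_children):
    transform_set = {}
    for node in container_children:
        transform_set[f"{node.name}.transform"] = node.pos
        transform_set[f"{node.name}.scale"] = node.scale
    return transform_set


old = [Node("box_01_:box", (1, 2, 3), (1, 1, 1))]
data = object_transform_set(old)

# ...delete and re-import, then restore on the freshly merged node:
new = Node("box_01_:box", (0, 0, 0), (1, 1, 1))
new.pos = data[f"{new.name}.transform"]
new.scale = data[f"{new.name}.scale"]
print(new.pos)  # (1, 2, 3)
```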
@@ -43,7 +43,7 @@ class RenderSettings(object):
                 rt.viewport.setCamera(sel)
                 break
         if not found:
-            raise RuntimeError("Camera not found")
+            raise RuntimeError("Active Camera not found")
 
     def render_output(self, container):
         folder = rt.maxFilePath
@@ -113,7 +113,8 @@ class RenderSettings(object):
         # for setting up renderable camera
         arv = rt.MAXToAOps.ArnoldRenderView()
         render_camera = rt.viewport.GetCamera()
-        arv.setOption("Camera", str(render_camera))
+        if render_camera:
+            arv.setOption("Camera", str(render_camera))
 
         # TODO: add AOVs and extension
         img_fmt = self._project_settings["max"]["RenderSettings"]["image_format"]  # noqa
@@ -15,8 +15,10 @@ from openpype.pipeline import (
 )
 from openpype.hosts.max.api.menu import OpenPypeMenu
 from openpype.hosts.max.api import lib
+from openpype.hosts.max.api.plugin import MS_CUSTOM_ATTRIB
 from openpype.hosts.max import MAX_HOST_DIR
 
 
 from pymxs import runtime as rt  # noqa
 
 log = logging.getLogger("openpype.hosts.max")
@@ -152,17 +154,18 @@ def ls() -> list:
         yield lib.read(container)
 
 
-def containerise(name: str, nodes: list, context, loader=None, suffix="_CON"):
+def containerise(name: str, nodes: list, context,
+                 namespace=None, loader=None, suffix="_CON"):
     data = {
         "schema": "openpype:container-2.0",
         "id": AVALON_CONTAINER_ID,
         "name": name,
-        "namespace": "",
+        "namespace": namespace or "",
         "loader": loader,
         "representation": context["representation"]["_id"],
     }
 
-    container_name = f"{name}{suffix}"
+    container_name = f"{namespace}:{name}{suffix}"
     container = rt.container(name=container_name)
     for node in nodes:
        node.Parent = container
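With the new `namespace` parameter every loader passes the namespace it generated via `unique_namespace`, and the master container's node name becomes namespace-qualified. Plain string formatting shows the effect; the sample values assume `unique_namespace("modelMain_", suffix="_")` yielded `"modelMain_01_"`:

```python
name, suffix = "modelMain", "_CON"
namespace = "modelMain_01_"  # assumed output of unique_namespace

old_style = f"{name}{suffix}"               # "modelMain_CON"
new_style = f"{namespace}:{name}{suffix}"   # "modelMain_01_:modelMain_CON"
print(old_style, new_style)
```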
@@ -170,3 +173,52 @@ def containerise(name: str, nodes: list, context, loader=None, suffix="_CON"):
     if not lib.imprint(container_name, data):
         print(f"imprinting of {container_name} failed.")
     return container
+
+
+def load_custom_attribute_data():
+    """Re-loading the Openpype/AYON custom parameter built by the creator
+
+    Returns:
+        attribute: re-loading the custom OP attributes set in Maxscript
+    """
+    return rt.Execute(MS_CUSTOM_ATTRIB)
+
+
+def import_custom_attribute_data(container: str, selections: list):
+    """Importing the Openpype/AYON custom parameter built by the creator
+
+    Args:
+        container (str): target container which adds custom attributes
+        selections (list): nodes to be added into
+            group in custom attributes
+    """
+    attrs = load_custom_attribute_data()
+    modifier = rt.EmptyModifier()
+    rt.addModifier(container, modifier)
+    container.modifiers[0].name = "OP Data"
+    rt.custAttributes.add(container.modifiers[0], attrs)
+    nodes = {}
+    for i in selections:
+        nodes = {
+            str(i): rt.NodeTransformMonitor(node=i),
+        }
+    # Setting the property
+    rt.setProperty(
+        container.modifiers[0].openPypeData,
+        "all_handles", nodes.values())
+    rt.setProperty(
+        container.modifiers[0].openPypeData,
+        "sel_list", nodes.keys())
+
+
+def update_custom_attribute_data(container: str, selections: list):
+    """Updating the Openpype/AYON custom parameter built by the creator
+
+    Args:
+        container (str): target container which adds custom attributes
+        selections (list): nodes to be added into
+            group in custom attributes
+    """
+    if container.modifiers[0].name == "OP Data":
+        rt.deleteModifier(container, container.modifiers[0])
+    import_custom_attribute_data(container, selections)
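Taken together, the 3ds Max loaders below follow one pattern: create a namespaced container, imprint the OP/AYON custom attributes on it, and on version switch rebuild the "OP Data" modifier. A hedged usage sketch; it is only meaningful inside 3ds Max with these functions imported from the pipeline module, and the container name is an assumed example:

```python
from pymxs import runtime as rt

# load: build the container, then imprint the custom attributes on it
container = rt.Container(name="modelMain_01_:modelMain_param")
import_custom_attribute_data(container, rt.GetCurrentSelection())

# update: drop the old "OP Data" modifier and re-imprint with new nodes
update_custom_attribute_data(container, rt.GetCurrentSelection())
```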
@@ -1,7 +1,16 @@
 import os
 
 from openpype.hosts.max.api import lib, maintained_selection
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.lib import (
+    unique_namespace,
+    get_namespace,
+    object_transform_set
+)
+from openpype.hosts.max.api.pipeline import (
+    containerise,
+    import_custom_attribute_data,
+    update_custom_attribute_data
+)
 from openpype.pipeline import get_representation_path, load
@@ -13,50 +22,76 @@ class FbxLoader(load.LoaderPlugin):
     order = -9
     icon = "code-fork"
     color = "white"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         from pymxs import runtime as rt
 
         filepath = self.filepath_from_context(context)
         filepath = os.path.normpath(filepath)
         rt.FBXImporterSetParam("Animation", True)
         rt.FBXImporterSetParam("Camera", True)
         rt.FBXImporterSetParam("AxisConversionMethod", True)
         rt.FBXImporterSetParam("Mode", rt.Name("create"))
         rt.FBXImporterSetParam("Preserveinstances", True)
         rt.ImportFile(
             filepath,
             rt.name("noPrompt"),
             using=rt.FBXIMP)
 
-        container = rt.GetNodeByName(f"{name}")
-        if not container:
-            container = rt.Container()
-            container.name = f"{name}"
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
+        container = rt.container(
+            name=f"{namespace}:{name}_{self.postfix}")
+        selections = rt.GetCurrentSelection()
+        import_custom_attribute_data(container, selections)
 
-        for selection in rt.GetCurrentSelection():
+        for selection in selections:
             selection.Parent = container
+            selection.name = f"{namespace}:{selection.name}"
 
         return containerise(
-            name, [container], context, loader=self.__class__.__name__)
+            name, [container], context,
+            namespace, loader=self.__class__.__name__)
 
     def update(self, container, representation):
         from pymxs import runtime as rt
 
         path = get_representation_path(representation)
-        node = rt.GetNodeByName(container["instance_node"])
-        rt.Select(node.Children)
-        fbx_reimport_cmd = (
-            f"""
+        node_name = container["instance_node"]
+        node = rt.getNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        inst_container = rt.getNodeByName(sub_node_name)
+        rt.Select(inst_container.Children)
+        transform_data = object_transform_set(inst_container.Children)
+        for prev_fbx_obj in rt.selection:
+            if rt.isValidNode(prev_fbx_obj):
+                rt.Delete(prev_fbx_obj)
 
-        FBXImporterSetParam "Animation" true
-        FBXImporterSetParam "Cameras" true
-        FBXImporterSetParam "AxisConversionMethod" true
-        FbxExporterSetParam "UpAxis" "Y"
-        FbxExporterSetParam "Preserveinstances" true
+        rt.FBXImporterSetParam("Animation", True)
+        rt.FBXImporterSetParam("Camera", True)
+        rt.FBXImporterSetParam("Mode", rt.Name("merge"))
+        rt.FBXImporterSetParam("AxisConversionMethod", True)
+        rt.FBXImporterSetParam("Preserveinstances", True)
+        rt.ImportFile(
+            path, rt.name("noPrompt"), using=rt.FBXIMP)
+        current_fbx_objects = rt.GetCurrentSelection()
+        for fbx_object in current_fbx_objects:
+            if fbx_object.Parent != inst_container:
+                fbx_object.Parent = inst_container
+            fbx_object.name = f"{namespace}:{fbx_object.name}"
+            fbx_object.pos = transform_data[
+                f"{fbx_object.name}.transform"]
+            fbx_object.scale = transform_data[
+                f"{fbx_object.name}.scale"]
 
-        importFile @"{path}" #noPrompt using:FBXIMP
-        """)
-        rt.Execute(fbx_reimport_cmd)
+        for children in node.Children:
+            if rt.classOf(children) == rt.Container:
+                if children.name == sub_node_name:
+                    update_custom_attribute_data(
+                        children, current_fbx_objects)
 
         with maintained_selection():
             rt.Select(node)
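The update path above is the template for the other FBX loaders in this commit: the MAXScript snippet previously built as an f-string and fed to `rt.Execute` is replaced with direct pymxs calls, which also drops what looks like a typo in the old snippet (`FbxExporterSetParam` inside an import routine). Both forms drive the same importer flag:

```python
from pymxs import runtime as rt

# old: MAXScript source evaluated as a string
rt.Execute('FBXImporterSetParam "Animation" true')

# new: the equivalent direct pymxs call
rt.FBXImporterSetParam("Animation", True)
```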
@@ -1,7 +1,15 @@
 import os
 
 from openpype.hosts.max.api import lib
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.lib import (
+    unique_namespace,
+    get_namespace,
+    object_transform_set
+)
+from openpype.hosts.max.api.pipeline import (
+    containerise, import_custom_attribute_data,
+    update_custom_attribute_data
+)
 from openpype.pipeline import get_representation_path, load
@@ -16,22 +24,34 @@ class MaxSceneLoader(load.LoaderPlugin):
     order = -8
     icon = "code-fork"
     color = "green"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         from pymxs import runtime as rt
 
         path = self.filepath_from_context(context)
         path = os.path.normpath(path)
         # import the max scene by using "merge file"
         path = path.replace('\\', '/')
-        rt.MergeMaxFile(path)
+        rt.MergeMaxFile(path, quiet=True, includeFullGroup=True)
         max_objects = rt.getLastMergedNodes()
-        max_container = rt.Container(name=f"{name}")
-        for max_object in max_objects:
-            max_object.Parent = max_container
+        max_object_names = [obj.name for obj in max_objects]
+        # implement the OP/AYON custom attributes before load
+        max_container = []
+
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
+        container_name = f"{namespace}:{name}_{self.postfix}"
+        container = rt.Container(name=container_name)
+        import_custom_attribute_data(container, max_objects)
+        max_container.append(container)
+        max_container.extend(max_objects)
+        for max_obj, obj_name in zip(max_objects, max_object_names):
+            max_obj.name = f"{namespace}:{obj_name}"
         return containerise(
-            name, [max_container], context, loader=self.__class__.__name__)
+            name, max_container, context,
+            namespace, loader=self.__class__.__name__)
 
     def update(self, container, representation):
         from pymxs import runtime as rt
@@ -39,15 +59,32 @@ class MaxSceneLoader(load.LoaderPlugin):
         path = get_representation_path(representation)
         node_name = container["instance_node"]
 
-        rt.MergeMaxFile(path,
-                        rt.Name("noRedraw"),
-                        rt.Name("deleteOldDups"),
-                        rt.Name("useSceneMtlDups"))
+        node = rt.getNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_container_name = f"{namespace}:{name}_{self.postfix}"
+        # delete the old container with attribute
+        # delete old duplicate
+        rt.Select(node.Children)
+        transform_data = object_transform_set(node.Children)
+        for prev_max_obj in rt.GetCurrentSelection():
+            if rt.isValidNode(prev_max_obj) and prev_max_obj.name != sub_container_name:  # noqa
+                rt.Delete(prev_max_obj)
+        rt.MergeMaxFile(path, rt.Name("deleteOldDups"))
 
-        max_objects = rt.getLastMergedNodes()
-        container_node = rt.GetNodeByName(node_name)
-        for max_object in max_objects:
-            max_object.Parent = container_node
+        current_max_objects = rt.getLastMergedNodes()
+        current_max_object_names = [obj.name for obj
+                                    in current_max_objects]
+        sub_container = rt.getNodeByName(sub_container_name)
+        update_custom_attribute_data(sub_container, current_max_objects)
+        for max_object in current_max_objects:
+            max_object.Parent = node
+        for max_obj, obj_name in zip(current_max_objects,
+                                     current_max_object_names):
+            max_obj.name = f"{namespace}:{obj_name}"
+            max_obj.pos = transform_data[
+                f"{max_obj.name}.transform"]
+            max_obj.scale = transform_data[
+                f"{max_obj.name}.scale"]
 
         lib.imprint(container["instance_node"], {
             "representation": str(representation["_id"])
@@ -1,8 +1,14 @@
 import os
 from openpype.pipeline import load, get_representation_path
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.pipeline import (
+    containerise,
+    import_custom_attribute_data,
+    update_custom_attribute_data
+)
 from openpype.hosts.max.api import lib
-from openpype.hosts.max.api.lib import maintained_selection
+from openpype.hosts.max.api.lib import (
+    maintained_selection, unique_namespace
+)
 
 
 class ModelAbcLoader(load.LoaderPlugin):
@@ -14,6 +20,7 @@ class ModelAbcLoader(load.LoaderPlugin):
     order = -10
     icon = "code-fork"
     color = "orange"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         from pymxs import runtime as rt
@@ -30,7 +37,7 @@ class ModelAbcLoader(load.LoaderPlugin):
         rt.AlembicImport.CustomAttributes = True
         rt.AlembicImport.UVs = True
         rt.AlembicImport.VertexColors = True
-        rt.importFile(file_path, rt.name("noPrompt"))
+        rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport)
 
         abc_after = {
             c
@@ -45,9 +52,22 @@ class ModelAbcLoader(load.LoaderPlugin):
             self.log.error("Something failed when loading.")
 
         abc_container = abc_containers.pop()
+        import_custom_attribute_data(
+            abc_container, abc_container.Children)
+
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
+        for abc_object in abc_container.Children:
+            abc_object.name = f"{namespace}:{abc_object.name}"
+        # rename the abc container with namespace
+        abc_container_name = f"{namespace}:{name}_{self.postfix}"
+        abc_container.name = abc_container_name
 
         return containerise(
-            name, [abc_container], context, loader=self.__class__.__name__
+            name, [abc_container], context,
+            namespace, loader=self.__class__.__name__
         )
 
     def update(self, container, representation):
@@ -55,21 +75,19 @@ class ModelAbcLoader(load.LoaderPlugin):
 
         path = get_representation_path(representation)
         node = rt.GetNodeByName(container["instance_node"])
-        rt.Select(node.Children)
-
-        for alembic in rt.Selection:
-            abc = rt.GetNodeByName(alembic.name)
-            rt.Select(abc.Children)
-            for abc_con in rt.Selection:
-                container = rt.GetNodeByName(abc_con.name)
-                container.source = path
-                rt.Select(container.Children)
-                for abc_obj in rt.Selection:
-                    alembic_obj = rt.GetNodeByName(abc_obj.name)
-                    alembic_obj.source = path
 
         with maintained_selection():
-            rt.Select(node)
+            rt.Select(node.Children)
+
+            for alembic in rt.Selection:
+                abc = rt.GetNodeByName(alembic.name)
+                update_custom_attribute_data(abc, abc.Children)
+                rt.Select(abc.Children)
+                for abc_con in abc.Children:
+                    abc_con.source = path
+                    rt.Select(abc_con.Children)
+                    for abc_obj in abc_con.Children:
+                        abc_obj.source = path
 
         lib.imprint(
             container["instance_node"],
@@ -1,7 +1,15 @@
 import os
 from openpype.pipeline import load, get_representation_path
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.pipeline import (
+    containerise, import_custom_attribute_data,
+    update_custom_attribute_data
+)
 from openpype.hosts.max.api import lib
+from openpype.hosts.max.api.lib import (
+    unique_namespace,
+    get_namespace,
+    object_transform_set
+)
 from openpype.hosts.max.api.lib import maintained_selection
@@ -13,6 +21,7 @@ class FbxModelLoader(load.LoaderPlugin):
     order = -9
     icon = "code-fork"
     color = "white"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         from pymxs import runtime as rt
@@ -20,39 +29,69 @@ class FbxModelLoader(load.LoaderPlugin):
         filepath = os.path.normpath(self.filepath_from_context(context))
         rt.FBXImporterSetParam("Animation", False)
         rt.FBXImporterSetParam("Cameras", False)
         rt.FBXImporterSetParam("Mode", rt.Name("create"))
         rt.FBXImporterSetParam("Preserveinstances", True)
         rt.importFile(filepath, rt.name("noPrompt"), using=rt.FBXIMP)
 
-        container = rt.GetNodeByName(name)
-        if not container:
-            container = rt.Container()
-            container.name = name
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
+        container = rt.container(
+            name=f"{namespace}:{name}_{self.postfix}")
+        selections = rt.GetCurrentSelection()
+        import_custom_attribute_data(container, selections)
 
-        for selection in rt.GetCurrentSelection():
+        for selection in selections:
             selection.Parent = container
+            selection.name = f"{namespace}:{selection.name}"
 
         return containerise(
-            name, [container], context, loader=self.__class__.__name__
+            name, [container], context,
+            namespace, loader=self.__class__.__name__
         )
 
     def update(self, container, representation):
         from pymxs import runtime as rt
         path = get_representation_path(representation)
-        node = rt.getNodeByName(container["instance_node"])
-        rt.select(node.Children)
+        node_name = container["instance_node"]
+        node = rt.getNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        inst_container = rt.getNodeByName(sub_node_name)
+        rt.Select(inst_container.Children)
+        transform_data = object_transform_set(inst_container.Children)
+        for prev_fbx_obj in rt.selection:
+            if rt.isValidNode(prev_fbx_obj):
+                rt.Delete(prev_fbx_obj)
 
         rt.FBXImporterSetParam("Animation", False)
         rt.FBXImporterSetParam("Cameras", False)
+        rt.FBXImporterSetParam("Mode", rt.Name("merge"))
+        rt.FBXImporterSetParam("AxisConversionMethod", True)
+        rt.FBXImporterSetParam("UpAxis", "Y")
+        rt.FBXImporterSetParam("Preserveinstances", True)
         rt.importFile(path, rt.name("noPrompt"), using=rt.FBXIMP)
+        current_fbx_objects = rt.GetCurrentSelection()
+        for fbx_object in current_fbx_objects:
+            if fbx_object.Parent != inst_container:
+                fbx_object.Parent = inst_container
+            fbx_object.name = f"{namespace}:{fbx_object.name}"
+            fbx_object.pos = transform_data[
+                f"{fbx_object.name}.transform"]
+            fbx_object.scale = transform_data[
+                f"{fbx_object.name}.scale"]
+
+        for children in node.Children:
+            if rt.classOf(children) == rt.Container:
+                if children.name == sub_node_name:
+                    update_custom_attribute_data(
+                        children, current_fbx_objects)
 
         with maintained_selection():
             rt.Select(node)
 
         lib.imprint(
-            container["instance_node"],
+            node_name,
             {"representation": str(representation["_id"])},
         )
@@ -1,8 +1,18 @@
 import os
 
 from openpype.hosts.max.api import lib
+from openpype.hosts.max.api.lib import (
+    unique_namespace,
+    get_namespace,
+    maintained_selection,
+    object_transform_set
+)
-from openpype.hosts.max.api.lib import maintained_selection
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.pipeline import (
+    containerise,
+    import_custom_attribute_data,
+    update_custom_attribute_data
+)
 from openpype.pipeline import get_representation_path, load
@@ -14,6 +24,7 @@ class ObjLoader(load.LoaderPlugin):
     order = -9
     icon = "code-fork"
     color = "white"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         from pymxs import runtime as rt
@@ -22,36 +33,49 @@ class ObjLoader(load.LoaderPlugin):
         self.log.debug("Executing command to import..")
 
         rt.Execute(f'importFile @"{filepath}" #noPrompt using:ObjImp')
 
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
         # create "missing" container for obj import
-        container = rt.Container()
-        container.name = name
-
+        container = rt.Container(name=f"{namespace}:{name}_{self.postfix}")
+        selections = rt.GetCurrentSelection()
+        import_custom_attribute_data(container, selections)
         # get current selection
-        for selection in rt.GetCurrentSelection():
+        for selection in selections:
             selection.Parent = container
-
-        asset = rt.GetNodeByName(name)
-
+            selection.name = f"{namespace}:{selection.name}"
         return containerise(
-            name, [asset], context, loader=self.__class__.__name__)
+            name, [container], context,
+            namespace, loader=self.__class__.__name__)
 
     def update(self, container, representation):
         from pymxs import runtime as rt
 
         path = get_representation_path(representation)
         node_name = container["instance_node"]
-        node = rt.GetNodeByName(node_name)
-
-        instance_name, _ = node_name.split("_")
-        container = rt.GetNodeByName(instance_name)
-        for child in container.Children:
-            rt.Delete(child)
+        node = rt.getNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        inst_container = rt.getNodeByName(sub_node_name)
+        rt.Select(inst_container.Children)
+        transform_data = object_transform_set(inst_container.Children)
+        for prev_obj in rt.selection:
+            if rt.isValidNode(prev_obj):
+                rt.Delete(prev_obj)
 
         rt.Execute(f'importFile @"{path}" #noPrompt using:ObjImp')
         # get current selection
-        for selection in rt.GetCurrentSelection():
-            selection.Parent = container
-
+        selections = rt.GetCurrentSelection()
+        update_custom_attribute_data(inst_container, selections)
+        for selection in selections:
+            selection.Parent = inst_container
+            selection.name = f"{namespace}:{selection.name}"
+            selection.pos = transform_data[
+                f"{selection.name}.transform"]
+            selection.scale = transform_data[
+                f"{selection.name}.scale"]
+        with maintained_selection():
+            rt.Select(node)
@@ -1,8 +1,16 @@
 import os
 
 from openpype.hosts.max.api import lib
+from openpype.hosts.max.api.lib import (
+    unique_namespace,
+    get_namespace,
+    object_transform_set
+)
 from openpype.hosts.max.api.lib import maintained_selection
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.pipeline import (
+    containerise,
+    import_custom_attribute_data
+)
 from openpype.pipeline import get_representation_path, load
@@ -15,6 +23,7 @@ class ModelUSDLoader(load.LoaderPlugin):
     order = -10
     icon = "code-fork"
     color = "orange"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         from pymxs import runtime as rt
@@ -30,11 +39,24 @@ class ModelUSDLoader(load.LoaderPlugin):
         rt.LogLevel = rt.Name("info")
         rt.USDImporter.importFile(filepath,
                                   importOptions=import_options)
 
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
         asset = rt.GetNodeByName(name)
+        import_custom_attribute_data(asset, asset.Children)
+        for usd_asset in asset.Children:
+            usd_asset.name = f"{namespace}:{usd_asset.name}"
+
+        asset_name = f"{namespace}:{name}_{self.postfix}"
+        asset.name = asset_name
+        # need to get the correct container after renamed
+        asset = rt.GetNodeByName(asset_name)
 
         return containerise(
-            name, [asset], context, loader=self.__class__.__name__)
+            name, [asset], context,
+            namespace, loader=self.__class__.__name__)
 
     def update(self, container, representation):
         from pymxs import runtime as rt
@@ -42,11 +64,16 @@ class ModelUSDLoader(load.LoaderPlugin):
         path = get_representation_path(representation)
         node_name = container["instance_node"]
         node = rt.GetNodeByName(node_name)
+        namespace, name = get_namespace(node_name)
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        transform_data = None
         for n in node.Children:
-            for r in n.Children:
-                rt.Delete(r)
+            rt.Select(n.Children)
+            transform_data = object_transform_set(n.Children)
+            for prev_usd_asset in rt.selection:
+                if rt.isValidNode(prev_usd_asset):
+                    rt.Delete(prev_usd_asset)
             rt.Delete(n)
-        instance_name, _ = node_name.split("_")
 
         import_options = rt.USDImporter.CreateOptions()
         base_filename = os.path.basename(path)
@@ -55,11 +82,20 @@ class ModelUSDLoader(load.LoaderPlugin):
 
         rt.LogPath = log_filepath
         rt.LogLevel = rt.Name("info")
-        rt.USDImporter.importFile(path,
-                                  importOptions=import_options)
+        rt.USDImporter.importFile(
+            path, importOptions=import_options)
 
-        asset = rt.GetNodeByName(instance_name)
+        asset = rt.GetNodeByName(name)
         asset.Parent = node
+        import_custom_attribute_data(asset, asset.Children)
+        for children in asset.Children:
+            children.name = f"{namespace}:{children.name}"
+            children.pos = transform_data[
+                f"{children.name}.transform"]
+            children.scale = transform_data[
+                f"{children.name}.scale"]
+
+        asset.name = sub_node_name
 
         with maintained_selection():
             rt.Select(node)
@@ -7,7 +7,12 @@ Because of limited api, alembics can be only loaded, but not easily updated.
 import os
 from openpype.pipeline import load, get_representation_path
 from openpype.hosts.max.api import lib, maintained_selection
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.lib import unique_namespace
+from openpype.hosts.max.api.pipeline import (
+    containerise,
+    import_custom_attribute_data,
+    update_custom_attribute_data
+)
 
 
 class AbcLoader(load.LoaderPlugin):
@@ -19,6 +24,7 @@ class AbcLoader(load.LoaderPlugin):
     order = -10
     icon = "code-fork"
     color = "orange"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         from pymxs import runtime as rt
@@ -33,7 +39,7 @@ class AbcLoader(load.LoaderPlugin):
         }
 
         rt.AlembicImport.ImportToRoot = False
-        rt.importFile(file_path, rt.name("noPrompt"))
+        rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport)
 
         abc_after = {
             c
@@ -48,13 +54,27 @@ class AbcLoader(load.LoaderPlugin):
             self.log.error("Something failed when loading.")
 
         abc_container = abc_containers.pop()
 
-        for abc in rt.GetCurrentSelection():
+        selections = rt.GetCurrentSelection()
+        import_custom_attribute_data(
+            abc_container, abc_container.Children)
+        for abc in selections:
             for cam_shape in abc.Children:
                 cam_shape.playbackType = 2
 
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
+
+        for abc_object in abc_container.Children:
+            abc_object.name = f"{namespace}:{abc_object.name}"
+        # rename the abc container with namespace
+        abc_container_name = f"{namespace}:{name}_{self.postfix}"
+        abc_container.name = abc_container_name
 
         return containerise(
-            name, [abc_container], context, loader=self.__class__.__name__
+            name, [abc_container], context,
+            namespace, loader=self.__class__.__name__
        )
 
     def update(self, container, representation):
@@ -63,28 +83,23 @@ class AbcLoader(load.LoaderPlugin):
         path = get_representation_path(representation)
         node = rt.GetNodeByName(container["instance_node"])
 
-        alembic_objects = self.get_container_children(node, "AlembicObject")
-        for alembic_object in alembic_objects:
-            alembic_object.source = path
-
-        lib.imprint(
-            container["instance_node"],
-            {"representation": str(representation["_id"])},
-        )
-
         with maintained_selection():
             rt.Select(node.Children)
 
             for alembic in rt.Selection:
                 abc = rt.GetNodeByName(alembic.name)
+                update_custom_attribute_data(abc, abc.Children)
                 rt.Select(abc.Children)
-                for abc_con in rt.Selection:
-                    container = rt.GetNodeByName(abc_con.name)
-                    container.source = path
-                    rt.Select(container.Children)
-                    for abc_obj in rt.Selection:
-                        alembic_obj = rt.GetNodeByName(abc_obj.name)
-                        alembic_obj.source = path
+                for abc_con in abc.Children:
+                    abc_con.source = path
+                    rt.Select(abc_con.Children)
+                    for abc_obj in abc_con.Children:
+                        abc_obj.source = path
+
+        lib.imprint(
+            container["instance_node"],
+            {"representation": str(representation["_id"])},
+        )
 
     def switch(self, container, representation):
         self.update(container, representation)
@@ -1,7 +1,14 @@
 import os
 
 from openpype.hosts.max.api import lib, maintained_selection
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.lib import (
+    unique_namespace, get_namespace
+)
+from openpype.hosts.max.api.pipeline import (
+    containerise,
+    import_custom_attribute_data,
+    update_custom_attribute_data
+)
 from openpype.pipeline import get_representation_path, load
@@ -13,6 +20,7 @@ class PointCloudLoader(load.LoaderPlugin):
     order = -8
     icon = "code-fork"
     color = "green"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         """load point cloud by tyCache"""
@@ -22,10 +30,19 @@ class PointCloudLoader(load.LoaderPlugin):
         obj = rt.tyCache()
         obj.filename = filepath
 
-        prt_container = rt.GetNodeByName(obj.name)
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
+        prt_container = rt.Container(
+            name=f"{namespace}:{name}_{self.postfix}")
+        import_custom_attribute_data(prt_container, [obj])
+        obj.Parent = prt_container
+        obj.name = f"{namespace}:{obj.name}"
 
         return containerise(
-            name, [prt_container], context, loader=self.__class__.__name__)
+            name, [prt_container], context,
+            namespace, loader=self.__class__.__name__)
 
     def update(self, container, representation):
         """update the container"""
@@ -33,15 +50,18 @@ class PointCloudLoader(load.LoaderPlugin):
 
         path = get_representation_path(representation)
         node = rt.GetNodeByName(container["instance_node"])
+        namespace, name = get_namespace(container["instance_node"])
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        inst_container = rt.getNodeByName(sub_node_name)
+        update_custom_attribute_data(
+            inst_container, inst_container.Children)
         with maintained_selection():
             rt.Select(node.Children)
-            for prt in rt.Selection:
-                prt_object = rt.GetNodeByName(prt.name)
-                prt_object.filename = path
-
-        lib.imprint(container["instance_node"], {
-            "representation": str(representation["_id"])
-        })
+        for prt in inst_container.Children:
+            prt.filename = path
+        lib.imprint(container["instance_node"], {
+            "representation": str(representation["_id"])
+        })
 
     def switch(self, container, representation):
         self.update(container, representation)
@@ -5,8 +5,15 @@ from openpype.pipeline import (
     load,
     get_representation_path
 )
-from openpype.hosts.max.api.pipeline import containerise
+from openpype.hosts.max.api.pipeline import (
+    containerise,
+    import_custom_attribute_data,
+    update_custom_attribute_data
+)
 from openpype.hosts.max.api import lib
+from openpype.hosts.max.api.lib import (
+    unique_namespace, get_namespace
+)
 
 
 class RedshiftProxyLoader(load.LoaderPlugin):
@@ -18,6 +25,7 @@ class RedshiftProxyLoader(load.LoaderPlugin):
     order = -9
     icon = "code-fork"
     color = "white"
+    postfix = "param"
 
     def load(self, context, name=None, namespace=None, data=None):
         from pymxs import runtime as rt
@@ -30,24 +38,32 @@ class RedshiftProxyLoader(load.LoaderPlugin):
         if collections:
             rs_proxy.is_sequence = True
 
-        container = rt.container()
-        container.name = name
+        namespace = unique_namespace(
+            name + "_",
+            suffix="_",
+        )
+        container = rt.Container(
+            name=f"{namespace}:{name}_{self.postfix}")
         rs_proxy.Parent = container
-
-        asset = rt.getNodeByName(name)
+        rs_proxy.name = f"{namespace}:{rs_proxy.name}"
+        import_custom_attribute_data(container, [rs_proxy])
 
         return containerise(
-            name, [asset], context, loader=self.__class__.__name__)
+            name, [container], context,
+            namespace, loader=self.__class__.__name__)
 
     def update(self, container, representation):
         from pymxs import runtime as rt
 
         path = get_representation_path(representation)
-        node = rt.getNodeByName(container["instance_node"])
-        for children in node.Children:
-            children_node = rt.getNodeByName(children.name)
-            for proxy in children_node.Children:
-                proxy.file = path
+        namespace, name = get_namespace(container["instance_node"])
+        sub_node_name = f"{namespace}:{name}_{self.postfix}"
+        inst_container = rt.getNodeByName(sub_node_name)
+
+        update_custom_attribute_data(
+            inst_container, inst_container.Children)
+        for proxy in inst_container.Children:
+            proxy.file = path
 
         lib.imprint(container["instance_node"], {
             "representation": str(representation["_id"])
@@ -34,6 +34,9 @@ class CollectRender(pyblish.api.InstancePlugin):
             aovs = RenderProducts().get_aovs(instance.name)
             files_by_aov.update(aovs)
 
+        camera = rt.viewport.GetCamera()
+        instance.data["cameras"] = [camera.name] if camera else None  # noqa
+
         if "expectedFiles" not in instance.data:
             instance.data["expectedFiles"] = list()
             instance.data["files"] = list()
@@ -13,7 +13,6 @@ class ValidateMaxContents(pyblish.api.InstancePlugin):
     order = pyblish.api.ValidatorOrder
     families = ["camera",
                 "maxScene",
-                "maxrender",
                 "review"]
     hosts = ["max"]
     label = "Max Scene Contents"
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+from openpype.pipeline import (
+    PublishValidationError,
+    OptionalPyblishPluginMixin)
+from openpype.pipeline.publish import RepairAction
+from openpype.hosts.max.api.lib import get_current_renderer
+
+from pymxs import runtime as rt
+
+
+class ValidateRenderableCamera(pyblish.api.InstancePlugin,
+                               OptionalPyblishPluginMixin):
+    """Validates Renderable Camera
+
+    Check if the renderable camera used for rendering
+    """
+
+    order = pyblish.api.ValidatorOrder
+    families = ["maxrender"]
+    hosts = ["max"]
+    label = "Renderable Camera"
+    optional = True
+    actions = [RepairAction]
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        if not instance.data["cameras"]:
+            raise PublishValidationError(
+                "No renderable Camera found in scene."
+            )
+
+    @classmethod
+    def repair(cls, instance):
+
+        rt.viewport.setType(rt.Name("view_camera"))
+        camera = rt.viewport.GetCamera()
+        cls.log.info(f"Camera {camera} set as renderable camera")
+        renderer_class = get_current_renderer()
+        renderer = str(renderer_class).split(":")[0]
+        if renderer == "Arnold":
+            arv = rt.MAXToAOps.ArnoldRenderView()
+            arv.setOption("Camera", str(camera))
+            arv.close()
+        instance.data["cameras"] = [camera.name]
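This new validator is optional, so `process` returns early unless the artist left it enabled; repair points the active viewport back at a camera. A self-contained toy showing the same gate-then-validate shape, with a stub standing in for `OptionalPyblishPluginMixin.is_active` (the stub's flag name is an assumption, not the real mixin contract):

```python
class OptionalMixinStub:
    """Stub of the mixin's gate: honour a per-instance 'active' flag."""
    optional = True

    def is_active(self, data):
        return data.get("active", True)


class ToyRenderableCameraCheck(OptionalMixinStub):
    def process(self, data):
        if not self.is_active(data):
            return "skipped"
        if not data.get("cameras"):
            raise ValueError("No renderable Camera found in scene.")
        return "ok"


check = ToyRenderableCameraCheck()
print(check.process({"cameras": ["Camera001"]}))          # ok
print(check.process({"cameras": None, "active": False}))  # skipped
```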
@@ -249,7 +249,6 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
         Authenticate with Muster, collect all data, prepare path for post
         render publish job and submit job to farm.
         """
-        instance.data["toBeRenderedOn"] = "muster"
         # setup muster environment
         self.MUSTER_REST_URL = os.environ.get("MUSTER_REST_URL")
@@ -1,60 +0,0 @@
-from maya import cmds
-
-import pyblish.api
-from openpype.pipeline.publish import (
-    ValidateContentsOrder, PublishValidationError, RepairAction
-)
-from openpype.pipeline import discover_legacy_creator_plugins
-from openpype.hosts.maya.api.lib import imprint
-
-
-class ValidateInstanceAttributes(pyblish.api.InstancePlugin):
-    """Validate Instance Attributes.
-
-    New attributes can be introduced as new features come in. Old instances
-    will need to be updated with these attributes for the documentation to make
-    sense, and users do not have to recreate the instances.
-    """
-
-    order = ValidateContentsOrder
-    hosts = ["maya"]
-    families = ["*"]
-    label = "Instance Attributes"
-    plugins_by_family = {
-        p.family: p for p in discover_legacy_creator_plugins()
-    }
-    actions = [RepairAction]
-
-    @classmethod
-    def get_missing_attributes(self, instance):
-        plugin = self.plugins_by_family[instance.data["family"]]
-        subset = instance.data["subset"]
-        asset = instance.data["asset"]
-        objset = instance.data["objset"]
-
-        missing_attributes = {}
-        for key, value in plugin(subset, asset).data.items():
-            if not cmds.objExists("{}.{}".format(objset, key)):
-                missing_attributes[key] = value
-
-        return missing_attributes
-
-    def process(self, instance):
-        objset = instance.data.get("objset")
-        if objset is None:
-            self.log.debug(
-                "Skipping {} because no objectset found.".format(instance)
-            )
-            return
-
-        missing_attributes = self.get_missing_attributes(instance)
-        if missing_attributes:
-            raise PublishValidationError(
-                "Missing attributes on {}:\n{}".format(
-                    objset, missing_attributes
-                )
-            )
-
-    @classmethod
-    def repair(cls, instance):
-        imprint(instance.data["objset"], cls.get_missing_attributes(instance))
@@ -3,94 +3,19 @@
 from __future__ import absolute_import
 
 import pyblish.api
+import openpype.hosts.maya.api.action
 from openpype.pipeline.publish import (
-    ValidateContentsOrder, PublishValidationError
+    RepairAction,
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
 )
 
 from maya import cmds
 
 
-class SelectInvalidInstances(pyblish.api.Action):
-    """Select invalid instances in Outliner."""
-
-    label = "Select Instances"
-    icon = "briefcase"
-    on = "failed"
-
-    def process(self, context, plugin):
-        """Process invalid validators and select invalid instances."""
-        # Get the errored instances
-        failed = []
-        for result in context.data["results"]:
-            if (
-                result["error"] is None
-                or result["instance"] is None
-                or result["instance"] in failed
-                or result["plugin"] != plugin
-            ):
-                continue
-
-            failed.append(result["instance"])
-
-        # Apply pyblish.logic to get the instances for the plug-in
-        instances = pyblish.api.instances_by_plugin(failed, plugin)
-
-        if instances:
-            self.log.info(
-                "Selecting invalid nodes: %s" % ", ".join(
-                    [str(x) for x in instances]
-                )
-            )
-            self.select(instances)
-        else:
-            self.log.info("No invalid nodes found.")
-            self.deselect()
-
-    def select(self, instances):
-        cmds.select(instances, replace=True, noExpand=True)
-
-    def deselect(self):
-        cmds.select(deselect=True)
-
-
-class RepairSelectInvalidInstances(pyblish.api.Action):
-    """Repair the instance asset."""
-
-    label = "Repair"
-    icon = "wrench"
-    on = "failed"
-
-    def process(self, context, plugin):
-        # Get the errored instances
-        failed = []
-        for result in context.data["results"]:
-            if result["error"] is None:
-                continue
-            if result["instance"] is None:
-                continue
-            if result["instance"] in failed:
-                continue
-            if result["plugin"] != plugin:
-                continue
-
-            failed.append(result["instance"])
-
-        # Apply pyblish.logic to get the instances for the plug-in
-        instances = pyblish.api.instances_by_plugin(failed, plugin)
-
-        context_asset = context.data["assetEntity"]["name"]
-        for instance in instances:
-            self.set_attribute(instance, context_asset)
-
-    def set_attribute(self, instance, context_asset):
-        cmds.setAttr(
-            instance.data.get("name") + ".asset",
-            context_asset,
-            type="string"
-        )
-
-
-class ValidateInstanceInContext(pyblish.api.InstancePlugin):
+class ValidateInstanceInContext(pyblish.api.InstancePlugin,
+                                OptionalPyblishPluginMixin):
     """Validator to check if instance asset match context asset.
 
     When working in per-shot style you always publish data in context of
@@ -104,11 +29,49 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin):
     label = "Instance in same Context"
     optional = True
     hosts = ["maya"]
-    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]
+    actions = [
+        openpype.hosts.maya.api.action.SelectInvalidAction, RepairAction
+    ]
 
     def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
         asset = instance.data.get("asset")
-        context_asset = instance.context.data["assetEntity"]["name"]
-        msg = "{} has asset {}".format(instance.name, asset)
+        context_asset = self.get_context_asset(instance)
         if asset != context_asset:
-            raise PublishValidationError(msg)
+            raise PublishValidationError(
+                message=(
+                    "Instance '{}' publishes to different asset than current "
+                    "context: {}. Current context: {}".format(
+                        instance.name, asset, context_asset
+                    )
+                ),
+                description=(
+                    "## Publishing to a different asset\n"
+                    "There are publish instances present which are publishing "
+                    "into a different asset than your current context.\n\n"
+                    "Usually this is not what you want but there can be cases "
+                    "where you might want to publish into another asset or "
+                    "shot. If that's the case you can disable the validation "
+                    "on the instance to ignore it."
+                )
+            )
+
+    @classmethod
+    def get_invalid(cls, instance):
+        return [instance.data["instance_node"]]
+
+    @classmethod
+    def repair(cls, instance):
+        context_asset = cls.get_context_asset(instance)
+        instance_node = instance.data["instance_node"]
+        cmds.setAttr(
+            "{}.asset".format(instance_node),
+            context_asset,
+            type="string"
+        )
+
+    @staticmethod
+    def get_context_asset(instance):
+        return instance.context.data["assetEntity"]["name"]
@ -4,6 +4,8 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
|
||||
from openpype.hosts.maya.api.lib import pairwise
|
||||
from openpype.hosts.maya.api.action import SelectInvalidAction
|
||||
from openpype.pipeline.publish import (
|
||||
ValidateContentsOrder,
|
||||
PublishValidationError
|
||||
|
|
@ -19,31 +21,33 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin):
|
|||
hosts = ['maya']
|
||||
families = ["workfile"]
|
||||
label = "Plug-in Path Attributes"
|
||||
actions = [SelectInvalidAction]
|
||||
|
||||
def get_invalid(self, instance):
|
||||
# Attributes are defined in project settings
|
||||
attribute = []
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
invalid = list()
|
||||
|
||||
# get the project setting
|
||||
validate_path = (
|
||||
instance.context.data["project_settings"]["maya"]["publish"]
|
||||
)
|
||||
file_attr = validate_path["ValidatePluginPathAttributes"]["attribute"]
|
||||
file_attr = cls.attribute
|
||||
if not file_attr:
|
||||
return invalid
|
||||
|
||||
# get the nodes and file attributes
|
||||
for node, attr in file_attr.items():
|
||||
# check the related nodes
|
||||
targets = cmds.ls(type=node)
|
||||
# Consider only valid node types to avoid "Unknown object type" warning
|
||||
all_node_types = set(cmds.allNodeTypes())
|
||||
node_types = [key for key in file_attr.keys() if key in all_node_types]
|
||||
|
||||
for target in targets:
|
||||
# get the filepath
|
||||
file_attr = "{}.{}".format(target, attr)
|
||||
filepath = cmds.getAttr(file_attr)
|
||||
for node, node_type in pairwise(cmds.ls(type=node_types,
|
||||
showType=True)):
|
||||
# get the filepath
|
||||
file_attr = "{}.{}".format(node, file_attr[node_type])
|
||||
filepath = cmds.getAttr(file_attr)
|
||||
|
||||
if filepath and not os.path.exists(filepath):
|
||||
self.log.error("File {0} not exists".format(filepath)) # noqa
|
||||
invalid.append(target)
|
||||
if filepath and not os.path.exists(filepath):
|
||||
cls.log.error("{} '{}' uses non-existing filepath: {}"
|
||||
.format(node_type, node, filepath))
|
||||
invalid.append(node)
|
||||
|
||||
return invalid
|
||||
|
||||
|
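The rewritten `get_invalid` above walks `cmds.ls(type=node_types, showType=True)`, which returns node names and their types interleaved. A minimal sketch of the two-at-a-time grouping this relies on, assuming the common iterator recipe (the actual `pairwise` helper lives in `openpype.hosts.maya.api.lib` and may differ):

```python
def pairwise(iterable):
    """Group a flat sequence two at a time: s -> (s0, s1), (s2, s3), ..."""
    iterator = iter(iterable)
    return zip(iterator, iterator)

# cmds.ls(type=node_types, showType=True) yields a flat list such as
# ["file1", "file", "aiImage1", "aiImage"]; pairwise() regroups it into
# (node, node_type) tuples:
flat = ["file1", "file", "aiImage1", "aiImage"]
assert list(pairwise(flat)) == [("file1", "file"), ("aiImage1", "aiImage")]
```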
@ -51,5 +55,16 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin):
"""Process all directories Set as Filenames in Non-Maya Nodes"""
invalid = self.get_invalid(instance)
if invalid:
raise PublishValidationError("Non-existent Path "
"found: {0}".format(invalid))
raise PublishValidationError(
title="Plug-in Path Attributes",
message="Non-existent filepath found on nodes: {}".format(
", ".join(invalid)
),
description=(
"## Plug-in nodes use invalid filepaths\n"
"The workfile contains nodes from plug-ins that use "
"filepaths which do not exist.\n\n"
"Please make sure their filepaths are correct and the "
"files exist on disk."
)
)

@ -7,6 +7,7 @@ from openpype.hosts.maya.api import lib
from openpype.pipeline.publish import (
ValidateContentsOrder,
RepairAction,
PublishValidationError
)

@ -67,5 +68,30 @@ class ValidateShapeZero(pyblish.api.Validator):
invalid = self.get_invalid(instance)
if invalid:
raise ValueError("Shapes found with non-zero component tweaks: "
"{0}".format(invalid))
raise PublishValidationError(
title="Shape Component Tweaks",
message="Shapes found with non-zero component tweaks: '{}'"
"".format(", ".join(invalid)),
description=(
"## Shapes found with component tweaks\n"
"Shapes were detected that have component tweaks on their "
"components. Please remove the component tweaks to "
"continue.\n\n"
"### Repair\n"
"The repair action will try to *freeze* the component "
"tweaks into the shapes, which is usually the correct fix "
"if the mesh has no construction history (= has its "
"history deleted)."),
detail=(
"Maya allows to store component tweaks within shape nodes "
"which are applied between its `inMesh` and `outMesh` "
"connections resulting in the output of a shape node "
"differing from the input. We usually want to avoid this "
"for published meshes (in particular for Maya scenes) as "
"it can have unintended results when using these meshes "
"as intermediate meshes since it applies positional "
"differences without being visible edits in the node "
"graph.\n\n"
"These tweaks are traditionally stored in the `.pnts` "
"attribute of shapes.")
)

@ -2041,6 +2041,7 @@ class WorkfileSettings(object):
)

workfile_settings = imageio_host["workfile"]
viewer_process_settings = imageio_host["viewer"]["viewerProcess"]

if not config_data:
# TODO: backward compatibility for old projects - remove later

@ -2091,6 +2092,15 @@ class WorkfileSettings(object):
workfile_settings.pop("colorManagement", None)
workfile_settings.pop("OCIO_config", None)

# get monitor lut from settings respecting Nuke version differences
monitor_lut = workfile_settings.pop("monitorLut", None)
monitor_lut_data = self._get_monitor_settings(
viewer_process_settings, monitor_lut)

# set monitor related knobs luts (MonitorOut, Thumbnails)
for knob, value_ in monitor_lut_data.items():
workfile_settings[knob] = value_

# then set the rest
for knob, value_ in workfile_settings.items():
# skip unfilled ocio config path

@ -2107,8 +2117,9 @@ class WorkfileSettings(object):

# set ocio config path
if config_data:
config_path = config_data["path"].replace("\\", "/")
log.info("OCIO config path found: `{}`".format(
config_data["path"]))
config_path))

# check if there's a mismatch between environment and settings
correct_settings = self._is_settings_matching_environment(

@ -2118,6 +2129,40 @@ class WorkfileSettings(object):
if correct_settings:
self._set_ocio_config_path_to_workfile(config_data)

def _get_monitor_settings(self, viewer_lut, monitor_lut):
""" Get monitor settings from viewer and monitor lut

Args:
viewer_lut (str): viewer lut string
monitor_lut (str): monitor lut string

Returns:
dict: monitor settings
"""
output_data = {}
m_display, m_viewer = get_viewer_config_from_string(monitor_lut)
v_display, v_viewer = get_viewer_config_from_string(viewer_lut)

# set monitor lut differently for nuke version 14
if nuke.NUKE_VERSION_MAJOR >= 14:
output_data["monitorOutLUT"] = create_viewer_profile_string(
m_viewer, m_display, path_like=False)
# monitorLut=thumbnails - viewerProcess makes more sense
output_data["monitorLut"] = create_viewer_profile_string(
v_viewer, v_display, path_like=False)

if nuke.NUKE_VERSION_MAJOR == 13:
output_data["monitorOutLUT"] = create_viewer_profile_string(
m_viewer, m_display, path_like=False)
# monitorLut=thumbnails - viewerProcess makes more sense
output_data["monitorLut"] = create_viewer_profile_string(
v_viewer, v_display, path_like=True)
if nuke.NUKE_VERSION_MAJOR <= 12:
output_data["monitorLut"] = create_viewer_profile_string(
m_viewer, m_display, path_like=True)

return output_data

def _is_settings_matching_environment(self, config_data):
""" Check if OCIO config path is different from environment

@ -2177,6 +2222,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
"""
# replace path with env var if possible
ocio_path = self._replace_ocio_path_with_env_var(config_data)
ocio_path = ocio_path.replace("\\", "/")

log.info("Setting OCIO config path to: `{}`".format(
ocio_path))

@ -2232,7 +2278,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
Returns:
str: OCIO config path with environment variable TCL expression
"""
config_path = config_data["path"]
config_path = config_data["path"].replace("\\", "/")
config_template = config_data["template"]

included_vars = self._get_included_vars(config_template)

@ -3320,11 +3366,11 @@ def get_viewer_config_from_string(input_string):
display = split[0]
elif "(" in viewer:
pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"
result = re.findall(pattern, viewer)
result_ = re.findall(pattern, viewer)
try:
result = result.pop()
display = str(result[1]).rstrip()
viewer = str(result[0]).rstrip()
result_ = result_.pop()
display = str(result_[1]).rstrip()
viewer = str(result_[0]).rstrip()
except IndexError:
raise IndexError((
"Viewer Input string is not correct. "

@ -3332,3 +3378,22 @@ def get_viewer_config_from_string(input_string):
).format(input_string))

return (display, viewer)


def create_viewer_profile_string(viewer, display=None, path_like=False):
"""Convert viewer and display to string

Args:
viewer (str): viewer name
display (Optional[str]): display name
path_like (Optional[bool]): if True, return path like string

Returns:
str: viewer config string
"""
if not display:
return viewer

if path_like:
return "{}/{}".format(display, viewer)
return "{} ({})".format(viewer, display)
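The two helpers above are near-inverses: `get_viewer_config_from_string` parses a viewer/display string, and `create_viewer_profile_string` rebuilds whichever form a given Nuke version expects. A quick sketch of the expected behavior, with illustrative display and viewer names:

```python
display, viewer = "ACES", "sRGB"

# Nuke 14+ knobs take the parenthesized form, Nuke <= 12 the path-like form:
assert create_viewer_profile_string(viewer, display, path_like=False) == "sRGB (ACES)"
assert create_viewer_profile_string(viewer, display, path_like=True) == "ACES/sRGB"

# Parsing the parenthesized form recovers the (display, viewer) pair again:
assert get_viewer_config_from_string("sRGB (ACES)") == ("ACES", "sRGB")
```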
@ -14,27 +14,26 @@ class RepairActionBase(pyblish.api.Action):
# Get the errored instances
return get_errored_instances_from_context(context, plugin=plugin)

def repair_knob(self, instances, state):
def repair_knob(self, context, instances, state):
create_context = context.data["create_context"]
for instance in instances:
node = instance.data["transientData"]["node"]
files_remove = [os.path.join(instance.data["outputDir"], f)
for r in instance.data.get("representations", [])
for f in r.get("files", [])
]
self.log.info("Files to be removed: {}".format(files_remove))
for f in files_remove:
os.remove(f)
self.log.debug("removing file: {}".format(f))
node["render"].setValue(state)
# Reset the render knob
instance_id = instance.data.get("instance_id")
created_instance = create_context.get_instance_by_id(
instance_id
)
created_instance.creator_attributes["render_target"] = state
self.log.info("Rendering toggled to `{}`".format(state))

create_context.save_changes()


class RepairCollectionActionToLocal(RepairActionBase):
label = "Repair - rerender with \"Local\""

def process(self, context, plugin):
instances = self.get_instance(context, plugin)
self.repair_knob(instances, "Local")
self.repair_knob(context, instances, "local")


class RepairCollectionActionToFarm(RepairActionBase):

@ -42,7 +41,7 @@ class RepairCollectionActionToFarm(RepairActionBase):

def process(self, context, plugin):
instances = self.get_instance(context, plugin)
self.repair_knob(instances, "On farm")
self.repair_knob(context, instances, "farm")


class ValidateRenderedFrames(pyblish.api.InstancePlugin):

@ -1,3 +1,5 @@
from collections import defaultdict

import pyblish.api
from openpype.pipeline.publish import get_errored_instances_from_context
from openpype.hosts.nuke.api.lib import (

@ -87,6 +89,11 @@ class ValidateNukeWriteNode(
correct_data
))

# Collect key values of same type in a list.
values_by_name = defaultdict(list)
for knob_data in correct_data["knobs"]:
values_by_name[knob_data["name"]].append(knob_data["value"])

for knob_data in correct_data["knobs"]:
knob_type = knob_data["type"]
self.log.debug("__ knob_type: {}".format(

@ -105,28 +112,33 @@ class ValidateNukeWriteNode(
)

key = knob_data["name"]
value = knob_data["value"]
values = values_by_name[key]
node_value = write_node[key].value()

# fix type differences
if type(node_value) in (int, float):
try:
if isinstance(value, list):
value = color_gui_to_int(value)
else:
value = float(value)
node_value = float(node_value)
except ValueError:
value = str(value)
else:
value = str(value)
node_value = str(node_value)
fixed_values = []
for value in values:
if type(node_value) in (int, float):
try:

self.log.debug("__ key: {} | value: {}".format(
key, value
if isinstance(value, list):
value = color_gui_to_int(value)
else:
value = float(value)
node_value = float(node_value)
except ValueError:
value = str(value)
else:
value = str(value)
node_value = str(node_value)

fixed_values.append(value)

self.log.debug("__ key: {} | values: {}".format(
key, fixed_values
))
if (
node_value != value
node_value not in fixed_values
and key != "file"
and key != "tile_color"
):
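In short, where the validator previously compared a write-node knob against a single settings value, it now collects every value configured under the same knob name and accepts any of them. A minimal sketch of that membership check:

```python
from collections import defaultdict

# Settings may define several entries for one knob name (illustrative data):
knobs = [
    {"name": "file_type", "value": "exr"},
    {"name": "file_type", "value": "tiff"},
    {"name": "channels", "value": "rgb"},
]
values_by_name = defaultdict(list)
for knob_data in knobs:
    values_by_name[knob_data["name"]].append(knob_data["value"])

# A node value matching any configured entry now passes validation:
node_value = "tiff"
assert node_value in values_by_name["file_type"]
```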
@ -2,6 +2,8 @@
"""Hook to launch Unreal and prepare projects."""
import os
import copy
import shutil
import tempfile
from pathlib import Path

from qtpy import QtCore

@ -224,10 +226,24 @@ class UnrealPrelaunchHook(PreLaunchHook):
project_file = project_path / unreal_project_filename

if not project_file.is_file():
self.exec_ue_project_gen(engine_version,
unreal_project_name,
engine_path,
project_path)
with tempfile.TemporaryDirectory() as temp_dir:
self.exec_ue_project_gen(engine_version,
unreal_project_name,
engine_path,
Path(temp_dir))
try:
self.log.info((
f"Moving from {temp_dir} to "
f"{project_path.as_posix()}"
))
shutil.copytree(
temp_dir, project_path, dirs_exist_ok=True)

except shutil.Error as e:
raise ApplicationLaunchFailed((
f"{self.signature} Cannot copy directory {temp_dir} "
f"to {project_path.as_posix()} - {e}"
)) from e

self.launch_context.env["AYON_UNREAL_VERSION"] = engine_version
# Append project file to launch arguments
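The hook now generates the Unreal project into a throwaway temporary directory and only merges it into the destination afterwards, so a failed generation never leaves a half-written project behind. A stripped-down sketch of the pattern, where `generate` stands in for the project generator and is only illustrative:

```python
import shutil
import tempfile
from pathlib import Path

def build_into(project_path: Path, generate) -> None:
    with tempfile.TemporaryDirectory() as temp_dir:
        # Write the fresh project into the temp dir first.
        generate(Path(temp_dir))
        # dirs_exist_ok (Python 3.8+) merges into an existing destination.
        shutil.copytree(temp_dir, project_path, dirs_exist_ok=True)
```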
@ -1,4 +1,6 @@
import clique
import os
import re

import pyblish.api

@ -21,7 +23,19 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
representations = instance.data.get("representations")
for repr in representations:
data = instance.data.get("assetEntity", {}).get("data", {})
patterns = [clique.PATTERNS["frames"]]
repr_files = repr["files"]
if isinstance(repr_files, str):
continue

ext = repr.get("ext")
if not ext:
_, ext = os.path.splitext(repr_files[0])
elif not ext.startswith("."):
ext = ".{}".format(ext)
pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(
re.escape(ext))
patterns = [pattern]

collections, remainder = clique.assemble(
repr["files"], minimum_items=1, patterns=patterns)
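Anchoring the frame expression to the escaped extension keeps version tokens like `v001` out of the frame detection, which is what the generic `clique.PATTERNS["frames"]` tripped over. A quick check of the pattern, mirroring the test cases further below:

```python
import re
import clique

files = ["Main_beauty.v001.1001.ass.gz", "Main_beauty.v001.1002.ass.gz"]
pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(re.escape(".ass.gz"))

# Only the digits directly before the extension are treated as frame numbers.
collections, remainder = clique.assemble(
    files, minimum_items=1, patterns=[pattern])
assert len(collections) == 1 and not remainder
assert list(collections[0].indexes) == [1001, 1002]
```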
@ -30,6 +44,10 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
collection = collections[0]
frames = list(collection.indexes)

if instance.data.get("slate"):
# Slate is not part of the frame range
frames = frames[1:]

current_range = (frames[0], frames[-1])
required_range = (data["clipIn"],
data["clipOut"])

@ -280,13 +280,14 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):

for key, value in add_args.items():
# Skip key values where value is None
if value is not None:
args.append("--{}".format(key))
# Extend list into arguments (targets can be a list)
if isinstance(value, (tuple, list)):
args.extend(value)
else:
args.append(value)
if value is None:
continue
arg_key = "--{}".format(key)
if not isinstance(value, (tuple, list)):
value = [value]

for item in value:
args += [arg_key, item]

log.info("args:: {}".format(args))
if add_to_queue:
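The rewritten loop repeats the flag in front of every list item instead of appending the bare values after a single flag, which is the shape argument parsers with append-style options expect. A self-contained sketch:

```python
def build_args(add_args):
    args = []
    for key, value in add_args.items():
        if value is None:
            continue
        arg_key = "--{}".format(key)
        if not isinstance(value, (tuple, list)):
            value = [value]
        for item in value:
            args += [arg_key, item]
    return args

# Each target now gets its own flag; None values are dropped entirely.
assert build_args({"targets": ["a", "b"], "user": None}) == [
    "--targets", "a", "--targets", "b"]
```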
@ -3,6 +3,7 @@ import os
import re
import copy
import inspect
import collections
import logging
import weakref
from uuid import uuid4

@ -340,8 +341,8 @@ class EventSystem(object):
event.emit()
return event

def emit_event(self, event):
"""Emit event object.
def _process_event(self, event):
"""Process event topic and trigger callbacks.

Args:
event (Event): Prepared event with topic and data.

@ -356,6 +357,91 @@ class EventSystem(object):
for callback in invalid_callbacks:
self._registered_callbacks.remove(callback)

def emit_event(self, event):
"""Emit event object.

Args:
event (Event): Prepared event with topic and data.
"""

self._process_event(event)


class QueuedEventSystem(EventSystem):
"""Events are automatically processed in queue.

If callback triggers another event, the event is not processed until
all callbacks of previous event are processed.

Allows to implement custom event process loop by changing 'auto_execute'.

Note:
This probably should be default behavior of 'EventSystem'. Changing it
now could cause problems in existing code.

Args:
auto_execute (Optional[bool]): If 'True', events are processed
automatically. Custom loop calling 'process_next_event'
must be implemented when set to 'False'.
"""

def __init__(self, auto_execute=True):
super(QueuedEventSystem, self).__init__()
self._event_queue = collections.deque()
self._current_event = None
self._auto_execute = auto_execute

def __len__(self):
return self.count()

def count(self):
"""Get number of events in queue.

Returns:
int: Number of events in queue.
"""

return len(self._event_queue)

def process_next_event(self):
"""Process next event in queue.

Should be used only if 'auto_execute' is set to 'False'. Only single
event is processed.

Returns:
Union[Event, None]: Processed event.
"""

if self._current_event is not None:
raise ValueError("An event is already in progress.")

if not self._event_queue:
return None
event = self._event_queue.popleft()
self._current_event = event
self._process_event(event)
self._current_event = None
return event

def emit_event(self, event):
"""Emit event object.

Args:
event (Event): Prepared event with topic and data.
"""

if not self._auto_execute or self._current_event is not None:
self._event_queue.append(event)
return

self._event_queue.append(event)
while self._event_queue:
event = self._event_queue.popleft()
self._current_event = event
self._process_event(event)
self._current_event = None


class GlobalEventSystem:
"""Event system living in global scope of process.

@ -373,10 +373,12 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
addons_info = _get_ayon_addons_information()
if not addons_info:
return v3_addons_to_skip
addons_dir = os.path.join(
appdirs.user_data_dir("AYON", "Ynput"),
"addons"
)
addons_dir = os.environ.get("AYON_ADDONS_DIR")
if not addons_dir:
addons_dir = os.path.join(
appdirs.user_data_dir("AYON", "Ynput"),
"addons"
)
if not os.path.exists(addons_dir):
log.warning("Addons directory does not exists. Path \"{}\"".format(
addons_dir

@ -8,6 +8,7 @@ attribute or using default server if that attribute doesn't exists.
from maya import cmds

import pyblish.api
from openpype.pipeline.publish import KnownPublishError


class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):

@ -81,13 +82,14 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
if k in default_servers
}

msg = (
"\"{}\" server on instance is not enabled in project settings."
" Enabled project servers:\n{}".format(
instance_server, project_enabled_servers
if instance_server not in project_enabled_servers:
msg = (
"\"{}\" server on instance is not enabled in project settings."
" Enabled project servers:\n{}".format(
instance_server, project_enabled_servers
)
)
)
assert instance_server in project_enabled_servers, msg
raise KnownPublishError(msg)

self.log.debug("Using project approved server.")
return project_enabled_servers[instance_server]

@ -1,31 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Scene setting</title>
<title>Deadline Pools</title>
<description>
## Invalid Deadline pools found
## Invalid Deadline pools found

Configured pools don't match what is set in Deadline.
Configured pools don't match available pools in Deadline.

{invalid_value_str}
### How to repair?

### How to repair?
If your instance had deadline pools set on creation, remove or
change them.

If your instance had deadline pools set on creation, remove or
change them.
In other cases inform admin to change them in Settings.

In other cases inform admin to change them in Settings.
Available deadline pools:

{pools_str}

Available deadline pools {pools_str}.
</description>
<detail>
### __Detailed Info__
### __Detailed Info__

This error is shown when deadline pool is not on Deadline anymore. It
could happen in case of republish old workfile which was created with
previous deadline pools,
or someone changed pools on Deadline side, but didn't modify Openpype
Settings.
This error is shown when a configured pool is not available on Deadline. It
can happen when publishing old workfiles which were created with previous
deadline pools, or someone changed the available pools in Deadline,
but didn't modify Openpype Settings to match the changes.
</detail>
</error>
</root>

@ -27,7 +27,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
deadline_job_delay = "00:00:08:00"

def process(self, instance):
instance.data["toBeRenderedOn"] = "deadline"

context = instance.context

# get default deadline webservice url from deadline module

@ -265,7 +265,7 @@ class HarmonySubmitDeadline(
job_info.SecondaryPool = self._instance.data.get("secondaryPool")
job_info.ChunkSize = self.chunk_size
batch_name = os.path.basename(self._instance.data["source"])
if is_in_tests:
if is_in_tests():
batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
job_info.BatchName = batch_name
job_info.Department = self.department

@ -141,4 +141,3 @@ class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
# Store output dir for unified publisher (filesequence)
output_dir = os.path.dirname(instance.data["files"][0])
instance.data["outputDir"] = output_dir
instance.data["toBeRenderedOn"] = "deadline"

@ -176,7 +176,6 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
first_file = next(self._iter_expected_files(files))
output_dir = os.path.dirname(first_file)
instance.data["outputDir"] = output_dir
instance.data["toBeRenderedOn"] = "deadline"

filename = os.path.basename(filepath)

@ -238,7 +237,10 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
if renderer == "Redshift_Renderer":
plugin_data["redshift_SeparateAovFiles"] = instance.data.get(
"separateAovFiles")

if instance.data["cameras"]:
plugin_info["Camera0"] = None
plugin_info["Camera"] = instance.data["cameras"][0]
plugin_info["Camera1"] = instance.data["cameras"][0]
self.log.debug("plugin data:{}".format(plugin_data))
plugin_info.update(plugin_data)

@ -300,7 +300,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
first_file = next(iter_expected_files(expected_files))
output_dir = os.path.dirname(first_file)
instance.data["outputDir"] = output_dir
instance.data["toBeRenderedOn"] = "deadline"

# Patch workfile (only when use_published is enabled)
if self.use_published:

@ -97,7 +97,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
instance.data["suspend_publish"] = instance.data["attributeValues"][
"suspend_publish"]

instance.data["toBeRenderedOn"] = "deadline"
families = instance.data["families"]

node = instance.data["transientData"]["node"]

@ -1,8 +1,7 @@
import os
import requests

import pyblish.api

from openpype_modules.deadline.abstract_submit_deadline import requests_get


class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
"""Validate Deadline Web Service is running"""

@ -12,34 +11,25 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
hosts = ["maya", "nuke"]
families = ["renderlayer", "render"]

# cache
responses = {}

def process(self, instance):
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
deadline_url = instance.data.get("deadlineUrl")
self.log.info(
"We have deadline URL on instance {}".format(
deadline_url))
self.log.debug(
"We have deadline URL on instance {}".format(deadline_url)
)
assert deadline_url, "Requires Deadline Webservice URL"

# Check response
response = self._requests_get(deadline_url)
if deadline_url not in self.responses:
self.responses[deadline_url] = requests_get(deadline_url)

response = self.responses[deadline_url]
assert response.ok, "Response must be ok"
assert response.text.startswith("Deadline Web Service "), (
"Web service did not respond with 'Deadline Web Service'"
)

def _requests_get(self, *args, **kwargs):
""" Wrapper for requests, disabling SSL certificate validation if
DONT_VERIFY_SSL environment variable is found. This is useful when
Deadline or Muster server are running with self-signed certificates
and their certificate is not added to trusted certificates on
client machines.

WARNING: disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa
return requests.get(*args, **kwargs)

@ -25,33 +25,58 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
"maxrender"]
optional = True

# cache
pools_per_url = {}

def process(self, instance):
if not self.is_active(instance.data):
return

if not instance.data.get("farm"):
self.log.debug("Skipping local instance.")
return

# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
self.log.info("deadline_url::{}".format(deadline_url))
pools = DeadlineModule.get_deadline_pools(deadline_url, log=self.log)
self.log.info("pools::{}".format(pools))

formatting_data = {
"pools_str": ",".join(pools)
}
deadline_url = self.get_deadline_url(instance)
pools = self.get_pools(deadline_url)

invalid_pools = {}
primary_pool = instance.data.get("primaryPool")
if primary_pool and primary_pool not in pools:
msg = "Configured primary '{}' not present on Deadline".format(
instance.data["primaryPool"])
formatting_data["invalid_value_str"] = msg
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)
invalid_pools["primary"] = primary_pool

secondary_pool = instance.data.get("secondaryPool")
if secondary_pool and secondary_pool not in pools:
msg = "Configured secondary '{}' not present on Deadline".format(
instance.data["secondaryPool"])
formatting_data["invalid_value_str"] = msg
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)
invalid_pools["secondary"] = secondary_pool

if invalid_pools:
message = "\n".join(
"{} pool '{}' not available on Deadline".format(key.title(),
pool)
for key, pool in invalid_pools.items()
)
raise PublishXmlValidationError(
plugin=self,
message=message,
formatting_data={"pools_str": ", ".join(pools)}
)

def get_deadline_url(self, instance):
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
if instance.data.get("deadlineUrl"):
# if custom one is set in instance, use that
deadline_url = instance.data.get("deadlineUrl")
return deadline_url

def get_pools(self, deadline_url):
if deadline_url not in self.pools_per_url:
self.log.debug(
"Querying available pools for Deadline url: {}".format(
deadline_url)
)
pools = DeadlineModule.get_deadline_pools(deadline_url,
log=self.log)
self.log.info("Available pools: {}".format(pools))
self.pools_per_url[deadline_url] = pools

return self.pools_per_url[deadline_url]
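Both Deadline validators above now memoize their webservice queries in a class-level dict, so a publish with many instances hits each URL only once. A minimal sketch of that caching pattern:

```python
class PoolCache:
    # Class-level dict: shared by all plugin instances in one session.
    pools_per_url = {}

    def get_pools(self, url, query):
        if url not in self.pools_per_url:
            # Expensive webservice call happens only on the first lookup.
            self.pools_per_url[url] = query(url)
        return self.pools_per_url[url]


calls = []

def fake_query(url):
    calls.append(url)
    return ["main"]

cache = PoolCache()
cache.get_pools("http://deadline:8082", fake_query)
cache.get_pools("http://deadline:8082", fake_query)
assert calls == ["http://deadline:8082"]  # second call served from cache
```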
@ -70,7 +70,10 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
# Update the representation expected files
self.log.info("Update range from actual job range "
"to frame list: {}".format(frame_list))
repre["files"] = sorted(job_expected_files)
# single item files must be string not list
repre["files"] = (sorted(job_expected_files)
if len(job_expected_files) > 1 else
list(job_expected_files)[0])

# Update the expected files
expected_files = job_expected_files

@ -91,7 +91,13 @@ class AyonDeadlinePlugin(DeadlinePlugin):
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
exe = FileUtils.SearchFileList(exe_list)

expanded_paths = []
for path in exe_list.split(";"):
if path.startswith("~"):
path = os.path.expanduser(path)
expanded_paths.append(path)
exe = FileUtils.SearchFileList(";".join(expanded_paths))

if exe == "":
self.FailRender(

@ -547,7 +547,14 @@ def get_ayon_executable():
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
return exe_list

# Expand user paths
expanded_paths = []
for path in exe_list.split(";"):
if path.startswith("~"):
path = os.path.expanduser(path)
expanded_paths.append(path)
return ";".join(expanded_paths)


def inject_render_job_id(deadlinePlugin):
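Both spots apply the same normalization: every `~`-prefixed entry in the `;`-separated executable candidate list is expanded to an absolute home path before Deadline searches the list. As a standalone helper:

```python
import os

def expand_user_paths(exe_list):
    """Expand '~' entries in a ';'-separated list of executable paths."""
    expanded_paths = []
    for path in exe_list.split(";"):
        if path.startswith("~"):
            path = os.path.expanduser(path)
        expanded_paths.append(path)
    return ";".join(expanded_paths)

# e.g. "~/ayon/ayon_console;/usr/bin/ayon"
# ->   "/home/<user>/ayon/ayon_console;/usr/bin/ayon"
```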
@ -77,4 +77,22 @@ CategoryOrder=0
Index=4
Label=Harmony 20 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=c:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 20 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 20 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_20/lnx86_64/bin/HarmonyPremium
Default=c:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 20 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 20 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_20/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_21]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=4
Label=Harmony 21 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=c:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 21 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 21 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_21/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_22]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=4
Label=Harmony 22 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=c:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 22 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 22 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_22/lnx86_64/bin/HarmonyPremium

@ -1,3 +1,4 @@
#!/usr/bin/env python3
from System import *
from System.Diagnostics import *
from System.IO import *

@ -75,7 +75,6 @@ class RenderInstance(object):
tilesY = attr.ib(default=0) # number of tiles in Y

# submit_publish_job
toBeRenderedOn = attr.ib(default=None)
deadlineSubmissionJob = attr.ib(default=None)
anatomyData = attr.ib(default=None)
outputDir = attr.ib(default=None)

@ -952,6 +952,7 @@ def replace_with_published_scene_path(instance, replace_in_path=True):

return file_path


def add_repre_files_for_cleanup(instance, repre):
""" Explicitly mark repre files to be deleted.

@ -960,7 +961,16 @@ def add_repre_files_for_cleanup(instance, repre):
"""
files = repre["files"]
staging_dir = repre.get("stagingDir")
if not staging_dir or instance.data.get("stagingDir_persistent"):

# first make sure representation level is not persistent
if (
not staging_dir
or repre.get("stagingDir_persistent")
):
return

# then look into instance level if it's not persistent
if instance.data.get("stagingDir_persistent"):
return

if isinstance(files, str):
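The cleanup guard above now checks persistence on two levels, representation first and instance second, so either flag can keep a staging directory around. Reduced to a predicate:

```python
def should_clean(repre, instance_data):
    """Return True only when neither level marks the staging dir persistent."""
    staging_dir = repre.get("stagingDir")
    # Representation level wins first.
    if not staging_dir or repre.get("stagingDir_persistent"):
        return False
    # Then fall back to the instance level.
    if instance_data.get("stagingDir_persistent"):
        return False
    return True
```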
35
openpype/plugins/publish/collect_farm_target.py
Normal file

@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
import pyblish.api


class CollectFarmTarget(pyblish.api.InstancePlugin):
"""Collects the render target for the instance
"""

order = pyblish.api.CollectorOrder + 0.499
label = "Collect Farm Target"
targets = ["local"]

def process(self, instance):
if not instance.data.get("farm"):
return

context = instance.context

farm_name = ""
op_modules = context.data.get("openPypeModules")

for farm_renderer in ["deadline", "royalrender", "muster"]:
op_module = op_modules.get(farm_renderer, False)

if op_module and op_module.enabled:
farm_name = farm_renderer
elif not op_module:
self.log.error("Cannot get OpenPype {0} module.".format(
farm_renderer))

if farm_name:
self.log.debug("Collected render target: {0}".format(farm_name))
instance.data["toBeRenderedOn"] = farm_name
else:
AssertionError("No OpenPype renderer module found")

@ -8,6 +8,11 @@ from ayon_api import slugify_string
from ayon_api.entity_hub import EntityHub

from openpype import AYON_SERVER_ENABLED
from openpype.client import get_assets
from openpype.pipeline.template_data import (
get_asset_template_data,
get_task_template_data,
)


def _default_json_parse(value):

@ -27,13 +32,51 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):

hierarchy_context = context.data.get("hierarchyContext")
if not hierarchy_context:
self.log.info("Skipping")
self.log.debug("Skipping")
return

project_name = context.data["projectName"]
self._create_hierarchy(context, project_name)
self._fill_instance_entities(context, project_name)

def _fill_instance_entities(self, context, project_name):
instances_by_asset_name = collections.defaultdict(list)
for instance in context:
if instance.data.get("publish") is False:
continue

instance_entity = instance.data.get("assetEntity")
if instance_entity:
continue

# Skip if instance asset does not match
instance_asset_name = instance.data.get("asset")
instances_by_asset_name[instance_asset_name].append(instance)

project_doc = context.data["projectEntity"]
asset_docs = get_assets(
project_name, asset_names=instances_by_asset_name.keys()
)
asset_docs_by_name = {
asset_doc["name"]: asset_doc
for asset_doc in asset_docs
}
for asset_name, instances in instances_by_asset_name.items():
asset_doc = asset_docs_by_name[asset_name]
asset_data = get_asset_template_data(asset_doc, project_name)
for instance in instances:
task_name = instance.data.get("task")
template_data = get_task_template_data(
project_doc, asset_doc, task_name)
template_data.update(copy.deepcopy(asset_data))

instance.data["anatomyData"].update(template_data)
instance.data["assetEntity"] = asset_doc

def _create_hierarchy(self, context, project_name):
hierarchy_context = self._filter_hierarchy(context)
if not hierarchy_context:
self.log.info("All folders were filtered out")
self.log.debug("All folders were filtered out")
return

self.log.debug("Hierarchy_context: {}".format(

@ -7,12 +7,12 @@ from openpype.pipeline.publish import (


class ValidatePublishDir(pyblish.api.InstancePlugin):
"""Validates if 'publishDir' is a project directory
"""Validates if files are being published into a project directory

'publishDir' is collected based on publish templates. In specific cases
('source' template) source folder of items is used as a 'publishDir', this
validates if it is inside any project dir for the project.
(eg. files are not published from local folder, unaccessible for studio'
In specific cases ('source' template - in place publishing) source folder
of published items is used as a regular `publish` dir.
This validates if it is inside any project dir for the project.
(eg. files are not published from local folder, inaccessible for studio')

"""

@ -44,6 +44,8 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):

anatomy = instance.context.data["anatomy"]

# original_dirname must be convertable to rootless path
# in other case it is path inside of root folder for the project
success, _ = anatomy.find_root_template_from_path(original_dirname)

formatting_data = {

@ -56,11 +58,12 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
formatting_data=formatting_data)

def _get_template_name_from_instance(self, instance):
"""Find template which will be used during integration."""
project_name = instance.context.data["projectName"]
host_name = instance.context.data["hostName"]
anatomy_data = instance.data["anatomyData"]
family = anatomy_data["family"]
family = self.family_mapping.get("family") or family
family = self.family_mapping.get(family) or family
task_info = anatomy_data.get("task") or {}

return get_publish_template_name(

@ -25,16 +25,16 @@ class ValidateVersion(pyblish.api.InstancePlugin):
# TODO: Remove full non-html version upon drop of old publisher
msg = (
"Version '{0}' from instance '{1}' that you are "
" trying to publish is lower or equal to an existing version "
" in the database. Version in database: '{2}'."
"trying to publish is lower or equal to an existing version "
"in the database. Version in database: '{2}'."
"Please version up your workfile to a higher version number "
"than: '{2}'."
).format(version, instance.data["name"], latest_version)

msg_html = (
"Version <b>{0}</b> from instance <b>{1}</b> that you are "
" trying to publish is lower or equal to an existing version "
" in the database. Version in database: <b>{2}</b>.<br><br>"
"trying to publish is lower or equal to an existing version "
"in the database. Version in database: <b>{2}</b>.<br><br>"
"Please version up your workfile to a higher version number "
"than: <b>{2}</b>."
).format(version, instance.data["name"], latest_version)

@ -616,6 +616,23 @@ def _convert_maya_project_settings(ayon_settings, output):
output["maya"] = ayon_maya


def _convert_3dsmax_project_settings(ayon_settings, output):
if "max" not in ayon_settings:
return

ayon_max = ayon_settings["max"]
_convert_host_imageio(ayon_max)
if "PointCloud" in ayon_max:
point_cloud_attribute = ayon_max["PointCloud"]["attribute"]
new_point_cloud_attribute = {
item["name"]: item["value"]
for item in point_cloud_attribute
}
ayon_max["PointCloud"]["attribute"] = new_point_cloud_attribute

output["max"] = ayon_max


def _convert_nuke_knobs(knobs):
new_knobs = []
for knob in knobs:

@ -737,6 +754,17 @@ def _convert_nuke_project_settings(ayon_settings, output):
item_filter["subsets"] = item_filter.pop("product_names")
item_filter["families"] = item_filter.pop("product_types")

reformat_nodes_config = item.get("reformat_nodes_config") or {}
reposition_nodes = reformat_nodes_config.get(
"reposition_nodes") or []

for reposition_node in reposition_nodes:
if "knobs" not in reposition_node:
continue
reposition_node["knobs"] = _convert_nuke_knobs(
reposition_node["knobs"]
)

name = item.pop("name")
new_review_data_outputs[name] = item
ayon_publish["ExtractReviewDataMov"]["outputs"] = new_review_data_outputs
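A recurring move in these settings converters: AYON stores mappings as lists of `{"name": ..., "value": ...}` objects, while OpenPype settings expect plain dicts keyed by name. The conversion is a one-line comprehension:

```python
# Illustrative AYON-style settings list, as used for the 3ds Max
# PointCloud attributes above:
items = [
    {"name": "attr1", "value": "x"},
    {"name": "attr2", "value": "y"},
]
by_name = {item["name"]: item["value"] for item in items}
assert by_name == {"attr1": "x", "attr2": "y"}
```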
@ -1261,6 +1289,7 @@ def convert_project_settings(ayon_settings, default_settings):
|
|||
_convert_flame_project_settings(ayon_settings, output)
|
||||
_convert_fusion_project_settings(ayon_settings, output)
|
||||
_convert_maya_project_settings(ayon_settings, output)
|
||||
_convert_3dsmax_project_settings(ayon_settings, output)
|
||||
_convert_nuke_project_settings(ayon_settings, output)
|
||||
_convert_hiero_project_settings(ayon_settings, output)
|
||||
_convert_photoshop_project_settings(ayon_settings, output)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring Pype version."""
|
||||
__version__ = "3.16.5-nightly.2"
|
||||
__version__ = "3.16.5-nightly.4"
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from ayon_server.settings import BaseSettingsModel
|
|||
|
||||
class CreateRenderPlugin(BaseSettingsModel):
|
||||
mark_for_review: bool = Field(True, title="Review")
|
||||
defaults: list[str] = Field(
|
||||
default_variants: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Default Variants"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@ DEFAULT_AFTEREFFECTS_SETTING = {
|
|||
"create": {
|
||||
"RenderCreator": {
|
||||
"mark_for_review": True,
|
||||
"defaults": [
|
||||
"default_variants": [
|
||||
"Main"
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring addon version."""
|
||||
__version__ = "0.1.1"
|
||||
__version__ = "0.1.2"
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ from ayon_server.settings import (
|
|||
BaseSettingsModel,
|
||||
MultiplatformPathListModel,
|
||||
ensure_unique_names,
|
||||
task_types_enum,
|
||||
)
|
||||
from ayon_server.exceptions import BadRequestException
|
||||
|
||||
|
|
@ -38,13 +39,52 @@ class CoreImageIOConfigModel(BaseSettingsModel):
|
|||
class CoreImageIOBaseModel(BaseSettingsModel):
|
||||
activate_global_color_management: bool = Field(
|
||||
False,
|
||||
title="Override global OCIO config"
|
||||
title="Enable Color Management"
|
||||
)
|
||||
ocio_config: CoreImageIOConfigModel = Field(
|
||||
default_factory=CoreImageIOConfigModel, title="OCIO config"
|
||||
default_factory=CoreImageIOConfigModel,
|
||||
title="OCIO config"
|
||||
)
|
||||
file_rules: CoreImageIOFileRulesModel = Field(
|
||||
default_factory=CoreImageIOFileRulesModel, title="File Rules"
|
||||
default_factory=CoreImageIOFileRulesModel,
|
||||
title="File Rules"
|
||||
)
|
||||
|
||||
|
||||
class VersionStartCategoryProfileModel(BaseSettingsModel):
|
||||
_layout = "expanded"
|
||||
host_names: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Host names"
|
||||
)
|
||||
task_types: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Task types",
|
||||
enum_resolver=task_types_enum
|
||||
)
|
||||
task_names: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Task names"
|
||||
)
|
||||
product_types: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Product types"
|
||||
)
|
||||
product_names: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Product names"
|
||||
)
|
||||
version_start: int = Field(
|
||||
1,
|
||||
title="Version Start",
|
||||
ge=0
|
||||
)
|
||||
|
||||
|
||||
class VersionStartCategoryModel(BaseSettingsModel):
|
||||
profiles: list[VersionStartCategoryProfileModel] = Field(
|
||||
default_factory=list,
|
||||
title="Profiles"
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -61,6 +101,10 @@ class CoreSettings(BaseSettingsModel):
|
|||
default_factory=GlobalToolsModel,
|
||||
title="Tools"
|
||||
)
|
||||
version_start_category: VersionStartCategoryModel = Field(
|
||||
default_factory=VersionStartCategoryModel,
|
||||
title="Version start"
|
||||
)
|
||||
imageio: CoreImageIOBaseModel = Field(
|
||||
default_factory=CoreImageIOBaseModel,
|
||||
title="Color Management (ImageIO)"
|
||||
|
|
@ -131,6 +175,9 @@ DEFAULT_VALUES = {
|
|||
"studio_code": "",
|
||||
"environments": "{}",
|
||||
"tools": DEFAULT_TOOLS_VALUES,
|
||||
"version_start_category": {
|
||||
"profiles": []
|
||||
},
|
||||
"publish": DEFAULT_PUBLISH_VALUES,
|
||||
"project_folder_structure": json.dumps({
|
||||
"__project_root__": {
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
__version__ = "0.1.1"
|
||||
__version__ = "0.1.2"
|
||||
|
|
|
|||
|
|
@ -44,6 +44,6 @@ class RenderSettingsModel(BaseSettingsModel):
|
|||
DEFAULT_RENDER_SETTINGS = {
|
||||
"default_render_image_folder": "renders/3dsmax",
|
||||
"aov_separator": "underscore",
|
||||
"image_format": "png",
|
||||
"image_format": "exr",
|
||||
"multipass": True
|
||||
}
|
||||
|
|
|
|||
|
|
@ -252,7 +252,9 @@ DEFAULT_CREATORS_SETTINGS = {
|
|||
},
|
||||
"CreateUnrealSkeletalMesh": {
|
||||
"enabled": True,
|
||||
"default_variants": [],
|
||||
"default_variants": [
|
||||
"Main",
|
||||
],
|
||||
"joint_hints": "jnt_org"
|
||||
},
|
||||
"CreateMultiverseLook": {
|
||||
|
|
|
|||
|
|
@ -288,5 +288,22 @@ DEFAULT_SIMPLE_CREATORS = [
|
|||
"allow_multiple_items": True,
|
||||
"allow_version_control": False,
|
||||
"extensions": []
|
||||
},
|
||||
{
|
||||
"product_type": "audio",
|
||||
"identifier": "",
|
||||
"label": "Audio ",
|
||||
"icon": "fa5s.file-audio",
|
||||
"default_variants": [
|
||||
"Main"
|
||||
],
|
||||
"description": "Audio product",
|
||||
"detailed_description": "Audio files for review or final delivery",
|
||||
"allow_sequences": False,
|
||||
"allow_multiple_items": False,
|
||||
"allow_version_control": False,
|
||||
"extensions": [
|
||||
".wav"
|
||||
]
|
||||
}
|
||||
]
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ import logging
|
|||
from pyblish.api import Instance as PyblishInstance
|
||||
|
||||
from tests.lib.testing_classes import BaseTest
|
||||
from openpype.plugins.publish.validate_sequence_frames import (
|
||||
from openpype.hosts.unreal.plugins.publish.validate_sequence_frames import (
|
||||
ValidateSequenceFrames
|
||||
)
|
||||
|
||||
|
|
@ -38,7 +38,13 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
data = {
|
||||
"frameStart": 1001,
|
||||
"frameEnd": 1002,
|
||||
"representations": []
|
||||
"representations": [],
|
||||
"assetEntity": {
|
||||
"data": {
|
||||
"clipIn": 1001,
|
||||
"clipOut": 1002,
|
||||
}
|
||||
}
|
||||
}
|
||||
yield Instance
|
||||
|
||||
|
|
@ -58,6 +64,7 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
]
|
||||
instance.data["representations"] = representations
|
||||
instance.data["frameEnd"] = 1001
|
||||
instance.data["assetEntity"]["data"]["clipOut"] = 1001
|
||||
|
||||
plugin.process(instance)
|
||||
|
||||
|
|
@ -84,49 +91,11 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
|
||||
plugin.process(instance)
|
||||
|
||||
@pytest.mark.parametrize("files",
|
||||
[["Main_beauty.1001.v001.exr",
|
||||
"Main_beauty.1002.v001.exr"]])
|
||||
def test_validate_sequence_frames_wrong_name(self, instance,
|
||||
plugin, files):
|
||||
# tests for names with number inside, caused clique failure before
|
||||
representations = [
|
||||
{
|
||||
"ext": "exr",
|
||||
"files": files,
|
||||
}
|
||||
]
|
||||
instance.data["representations"] = representations
|
||||
|
||||
with pytest.raises(AssertionError) as excinfo:
|
||||
plugin.process(instance)
|
||||
assert ("Must detect single collection" in
|
||||
str(excinfo.value))
|
||||
|
||||
@pytest.mark.parametrize("files",
|
||||
[["Main_beauty.v001.1001.ass.gz",
|
||||
"Main_beauty.v001.1002.ass.gz"]])
|
||||
def test_validate_sequence_frames_possible_wrong_name(
|
||||
self, instance, plugin, files):
|
||||
# currently pattern fails on extensions with dots
|
||||
representations = [
|
||||
{
|
||||
"files": files,
|
||||
}
|
||||
]
|
||||
instance.data["representations"] = representations
|
||||
|
||||
with pytest.raises(AssertionError) as excinfo:
|
||||
plugin.process(instance)
|
||||
assert ("Must not have remainder" in
|
||||
str(excinfo.value))
|
||||
|
||||
@pytest.mark.parametrize("files",
|
||||
[["Main_beauty.v001.1001.ass.gz",
|
||||
"Main_beauty.v001.1002.ass.gz"]])
|
||||
def test_validate_sequence_frames__correct_ext(
|
||||
self, instance, plugin, files):
|
||||
# currently pattern fails on extensions with dots
|
||||
representations = [
|
||||
{
|
||||
"ext": "ass.gz",
|
||||
|
|
@ -147,6 +116,7 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
]
|
||||
instance.data["representations"] = representations
|
||||
instance.data["frameEnd"] = 1003
|
||||
instance.data["assetEntity"]["data"]["clipOut"] = 1003
|
||||
|
||||
plugin.process(instance)
|
||||
|
||||
|
|
@ -160,6 +130,7 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
]
|
||||
instance.data["representations"] = representations
|
||||
instance.data["frameEnd"] = 1003
|
||||
instance.data["assetEntity"]["data"]["clipOut"] = 1003
|
||||
|
||||
with pytest.raises(ValueError) as excinfo:
|
||||
plugin.process(instance)
|
||||
|
|
@ -175,6 +146,7 @@ class TestValidateSequenceFrames(BaseTest):
        ]
        instance.data["representations"] = representations
        instance.data["frameEnd"] = 1003
        instance.data["assetEntity"]["data"]["clipOut"] = 1003

        with pytest.raises(AssertionError) as excinfo:
            plugin.process(instance)
@ -195,6 +167,7 @@ class TestValidateSequenceFrames(BaseTest):
        instance.data["slate"] = True
        instance.data["representations"] = representations
        instance.data["frameEnd"] = 1003
        instance.data["assetEntity"]["data"]["clipOut"] = 1003

        plugin.process(instance)
@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""Test suite for delivery functions."""
from openpype.lib.delivery import collect_frames
from openpype.lib import collect_frames


def test_collect_frames_multi_sequence():

@ -153,4 +153,3 @@ def test_collect_frames_single_file():

    print(ret)
    assert ret == expected, "Not matching"
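`collect_frames` now comes from the `openpype.lib` package root instead of the `openpype.lib.delivery` submodule. Judging by the assertions in this suite, it maps each file name to its detected frame token; a hypothetical call (file names invented for illustration):

    from openpype.lib import collect_frames

    ret = collect_frames(["render.0001.exr", "render.0002.exr"])
    # expected shape: {"render.0001.exr": "0001", "render.0002.exr": "0002"}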
83
tests/unit/openpype/lib/test_event_system.py
Normal file
@ -0,0 +1,83 @@
from openpype.lib.events import EventSystem, QueuedEventSystem


def test_default_event_system():
    output = []
    expected_output = [3, 2, 1]
    event_system = EventSystem()

    def callback_1():
        event_system.emit("topic.2", {}, None)
        output.append(1)

    def callback_2():
        event_system.emit("topic.3", {}, None)
        output.append(2)

    def callback_3():
        output.append(3)

    event_system.add_callback("topic.1", callback_1)
    event_system.add_callback("topic.2", callback_2)
    event_system.add_callback("topic.3", callback_3)

    event_system.emit("topic.1", {}, None)

    assert output == expected_output, (
        "Callbacks were not called in correct order")


def test_base_event_system_queue():
    output = []
    expected_output = [1, 2, 3]
    event_system = QueuedEventSystem()

    def callback_1():
        event_system.emit("topic.2", {}, None)
        output.append(1)

    def callback_2():
        event_system.emit("topic.3", {}, None)
        output.append(2)

    def callback_3():
        output.append(3)

    event_system.add_callback("topic.1", callback_1)
    event_system.add_callback("topic.2", callback_2)
    event_system.add_callback("topic.3", callback_3)

    event_system.emit("topic.1", {}, None)

    assert output == expected_output, (
        "Callbacks were not called in correct order")


def test_manual_event_system_queue():
    output = []
    expected_output = [1, 2, 3]
    event_system = QueuedEventSystem(auto_execute=False)

    def callback_1():
        event_system.emit("topic.2", {}, None)
        output.append(1)

    def callback_2():
        event_system.emit("topic.3", {}, None)
        output.append(2)

    def callback_3():
        output.append(3)

    event_system.add_callback("topic.1", callback_1)
    event_system.add_callback("topic.2", callback_2)
    event_system.add_callback("topic.3", callback_3)

    event_system.emit("topic.1", {}, None)

    while True:
        if event_system.process_next_event() is None:
            break

    assert output == expected_output, (
        "Callbacks were not called in correct order")
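Taken together, the three tests pin down the dispatch contract of the two classes: `EventSystem` processes a nested `emit` immediately, so the deepest callback finishes first and the output is `[3, 2, 1]`, while `QueuedEventSystem` defers nested events to a queue and preserves emit order, giving `[1, 2, 3]`. With `auto_execute=False` the queue is drained by hand; judging by the `while True` loop, `process_next_event()` returns None once the queue is empty.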
@ -12,16 +12,19 @@
        removes temporary databases (?)
"""
import pytest
from bson.objectid import ObjectId

from tests.lib.testing_classes import ModuleUnitTest
from bson.objectid import ObjectId

from openpype.modules.sync_server.utils import SiteAlreadyPresentError


class TestSiteOperation(ModuleUnitTest):

    REPRESENTATION_ID = "60e578d0c987036c6a7b741d"

    TEST_FILES = [("1eCwPljuJeOI8A3aisfOIBKKjcmIycTEt",
    TEST_FILES = [("1FHE70Hi7y05LLT_1O3Y6jGxwZGXKV9zX",
                   "test_site_operations.zip", '')]

    @pytest.fixture(scope="module")

@ -71,7 +74,7 @@ class TestSiteOperation(ModuleUnitTest):
    @pytest.mark.usefixtures("setup_sync_server_module")
    def test_add_site_again(self, dbcon, setup_sync_server_module):
        """Depends on test_add_site, must throw exception."""
        with pytest.raises(ValueError):
        with pytest.raises(SiteAlreadyPresentError):
            setup_sync_server_module.add_site(self.TEST_PROJECT_NAME,
                                              self.REPRESENTATION_ID,
                                              site_name='test_site')
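The tightened exception means `add_site` now raises the dedicated `SiteAlreadyPresentError` from `openpype.modules.sync_server.utils` rather than a generic `ValueError`, so callers can tell a duplicate site apart from other invalid input.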
98
tools/docker_build.ps1
Normal file
@ -0,0 +1,98 @@
$current_dir = Get-Location
$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
$repo_root = (Get-Item $script_dir).parent.FullName

$env:PSModulePath = $env:PSModulePath + ";$($repo_root)\tools\modules\powershell"

function Exit-WithCode($exitcode) {
    # Only exit this host process if it's a child of another PowerShell parent process...
    $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId
    $parentProcName = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$parentPID" | Select-Object -Property Name).Name
    if ('powershell.exe' -eq $parentProcName) { $host.SetShouldExit($exitcode) }

    exit $exitcode
}

function Restore-Cwd() {
    $tmp_current_dir = Get-Location
    if ("$tmp_current_dir" -ne "$current_dir") {
        Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray
        Set-Location -Path $current_dir
    }
}

function Get-Container {
    if (-not (Test-Path -PathType Leaf -Path "$($repo_root)\build\docker-image.id")) {
        Write-Color -Text "!!! ", "Docker command failed, cannot find image id." -Color Red, Yellow
        Restore-Cwd
        Exit-WithCode 1
    }
    $id = Get-Content "$($repo_root)\build\docker-image.id"
    Write-Color -Text ">>> ", "Creating container from image id ", "[", $id, "]" -Color Green, Gray, White, Cyan, White
    $cid = docker create $id bash
    if ($LASTEXITCODE -ne 0) {
        Write-Color -Text "!!! ", "Cannot create container." -Color Red, Yellow
        Restore-Cwd
        Exit-WithCode 1
    }
    return $cid
}

function Change-Cwd() {
    Set-Location -Path $repo_root
}

function New-DockerBuild {
    $version_file = Get-Content -Path "$($repo_root)\openpype\version.py"
    $result = [regex]::Matches($version_file, '__version__ = "(?<version>\d+\.\d+.\d+.*)"')
    $openpype_version = $result[0].Groups['version'].Value
    $startTime = [int][double]::Parse((Get-Date -UFormat %s))
    Write-Color -Text ">>> ", "Building OpenPype using Docker ..." -Color Green, Gray, White
    $variant = $args[0]
    if ($variant.Length -eq 0) {
        $dockerfile = "$($repo_root)\Dockerfile"
    } else {
        $dockerfile = "$($repo_root)\Dockerfile.$variant"
    }
    if (-not (Test-Path -PathType Leaf -Path $dockerfile)) {
        Write-Color -Text "!!! ", "Dockerfile for specified platform ", "[", $variant, "]", "doesn't exist." -Color Red, Yellow, Cyan, White, Cyan, Yellow
        Restore-Cwd
        Exit-WithCode 1
    }
    Write-Color -Text ">>> ", "Using Dockerfile for ", "[ ", $variant, " ]" -Color Green, Gray, White, Cyan, White

    $build_dir = "$($repo_root)\build"
    if (-not (Test-Path $build_dir)) {
        New-Item -ItemType Directory -Path $build_dir
    }
    Write-Color -Text "--- ", "Cleaning build directory ..." -Color Yellow, Gray
    try {
        Remove-Item -Recurse -Force "$($build_dir)\*"
    } catch {
        Write-Color -Text "!!! ", "Cannot clean build directory, possibly because process is using it." -Color Red, Gray
        Write-Color -Text $_.Exception.Message -Color Red
        Exit-WithCode 1
    }

    Write-Color -Text ">>> ", "Running Docker build ..." -Color Green, Gray, White
    docker build --pull --iidfile $repo_root/build/docker-image.id --build-arg BUILD_DATE=$(Get-Date -UFormat %Y-%m-%dT%H:%M:%SZ) --build-arg VERSION=$openpype_version -t pypeclub/openpype:$openpype_version -f $dockerfile .
    if ($LASTEXITCODE -ne 0) {
        Write-Color -Text "!!! ", "Docker command failed.", $LASTEXITCODE -Color Red, Yellow, Red
        Restore-Cwd
        Exit-WithCode 1
    }
    Write-Color -Text ">>> ", "Copying build from container ..." -Color Green, Gray, White
    $cid = Get-Container

    docker cp "$($cid):/opt/openpype/build/exe.linux-x86_64-3.9" "$($repo_root)/build"
    docker cp "$($cid):/opt/openpype/build/build.log" "$($repo_root)/build"

    $endTime = [int][double]::Parse((Get-Date -UFormat %s))
    try {
        New-BurntToastNotification -AppLogo "$repo_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $( $endTime - $startTime ) secs. You will find OpenPype and build log in build directory."
    } catch {}
    Write-Color -Text "*** ", "All done in ", $($endTime - $startTime), " secs. You will find OpenPype and build log in ", "'.\build'", " directory." -Color Green, Gray, White, Gray, White, Gray
}

Change-Cwd
New-DockerBuild $ARGS
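Given the `Dockerfile.$variant` lookup above, the script is presumably invoked from the repository root as `.\tools\docker_build.ps1` for the default Dockerfile, or with a variant argument such as `.\tools\docker_build.ps1 centos7` to pick `Dockerfile.centos7`. It expects the `Write-Color` (and optionally `BurntToast`) PowerShell modules to be resolvable through the `PSModulePath` entry added at the top; the toast notification is wrapped in try/catch, so a missing BurntToast module does not fail the build.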