Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00
Remove Houdini Mantra IFD product, fix #673
This commit is contained in:
parent 506c9975af
commit 64c57d4f6a
6 changed files with 3 additions and 119 deletions
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Creator plugin for creating Mantra IFD archives."""
-from ayon_houdini.api import plugin
-from ayon_core.lib import BoolDef
-
-
-class CreateMantraIFD(plugin.HoudiniCreator):
-    """Mantra .ifd Archive"""
-    identifier = "io.openpype.creators.houdini.mantraifd"
-    label = "Mantra IFD"
-    product_type = "mantraifd"
-    icon = "gears"
-
-    def create(self, product_name, instance_data, pre_create_data):
-        import hou
-        instance_data.pop("active", None)
-        instance_data.update({"node_type": "ifd"})
-        creator_attributes = instance_data.setdefault(
-            "creator_attributes", dict())
-        creator_attributes["farm"] = pre_create_data["farm"]
-        instance = super(CreateMantraIFD, self).create(
-            product_name,
-            instance_data,
-            pre_create_data)
-
-        instance_node = hou.node(instance.get("instance_node"))
-
-        filepath = "{}{}".format(
-            hou.text.expandString("$HIP/pyblish/"),
-            "{}.$F4.ifd".format(product_name))
-        parms = {
-            # Render frame range
-            "trange": 1,
-            # Mantra ROP settings
-            "soho_diskfile": filepath,
-            "soho_outputmode": 1
-        }
-
-        instance_node.setParms(parms)
-
-        # Lock any parameters in this list
-        to_lock = ["soho_outputmode", "productType", "id"]
-        self.lock_parameters(instance_node, to_lock)
-
-    def get_instance_attr_defs(self):
-        return [
-            BoolDef("farm",
-                    label="Submitting to Farm",
-                    default=False)
-        ]
-
-    def get_pre_create_attr_defs(self):
-        attrs = super().get_pre_create_attr_defs()
-        # Use same attributes as for instance attributes
-        return attrs + self.get_instance_attr_defs()
@@ -12,9 +12,7 @@ class CollectDataforCache(plugin.HoudiniInstancePlugin):
 
     # Run after Collect Frames
     order = pyblish.api.CollectorOrder + 0.11
-    families = ["ass", "pointcache",
-                "mantraifd", "redshiftproxy",
-                "vdbcache", "model"]
+    families = ["ass", "pointcache", "redshiftproxy", "vdbcache", "model"]
     targets = ["local", "remote"]
     label = "Collect Data for Cache"
 
@@ -9,9 +9,7 @@ class CollectChunkSize(plugin.HoudiniInstancePlugin,
     """Collect chunk size for cache submission to Deadline."""
 
     order = pyblish.api.CollectorOrder + 0.05
-    families = ["ass", "pointcache",
-                "vdbcache", "mantraifd",
-                "redshiftproxy", "model"]
+    families = ["ass", "pointcache", "vdbcache", "redshiftproxy", "model"]
     targets = ["local", "remote"]
     label = "Collect Chunk Size"
     chunk_size = 999999
@@ -16,8 +16,7 @@ class CollectFrames(plugin.HoudiniInstancePlugin):
     order = pyblish.api.CollectorOrder + 0.1
     label = "Collect Frames"
     families = ["vdbcache", "imagesequence", "ass",
-                "mantraifd", "redshiftproxy", "review",
-                "pointcache"]
+                "redshiftproxy", "review", "pointcache"]
 
     def process(self, instance):
 
@@ -1,49 +0,0 @@
-import os
-import hou
-
-import pyblish.api
-
-from ayon_houdini.api import plugin
-
-
-class ExtractMantraIFD(plugin.HoudiniExtractorPlugin):
-
-    order = pyblish.api.ExtractorOrder
-    label = "Extract Mantra ifd"
-    families = ["mantraifd"]
-    targets = ["local", "remote"]
-
-    def process(self, instance):
-        if instance.data.get("farm"):
-            self.log.debug("Should be processed on farm, skipping.")
-            return
-
-        ropnode = hou.node(instance.data.get("instance_node"))
-        output = ropnode.evalParm("soho_diskfile")
-        staging_dir = os.path.dirname(output)
-        instance.data["stagingDir"] = staging_dir
-
-        files = instance.data["frames"]
-        missing_frames = [
-            frame
-            for frame in instance.data["frames"]
-            if not os.path.exists(
-                os.path.normpath(os.path.join(staging_dir, frame)))
-        ]
-        if missing_frames:
-            raise RuntimeError("Failed to complete Mantra ifd extraction. "
-                               "Missing output files: {}".format(
-                                   missing_frames))
-
-        if "representations" not in instance.data:
-            instance.data["representations"] = []
-
-        representation = {
-            'name': 'ifd',
-            'ext': 'ifd',
-            'files': files,
-            "stagingDir": staging_dir,
-            "frameStart": instance.data["frameStart"],
-            "frameEnd": instance.data["frameEnd"],
-        }
-        instance.data["representations"].append(representation)
@@ -51,9 +51,6 @@ class CreatePluginsModel(BaseSettingsModel):
     CreateKarmaROP: CreatorModel = SettingsField(
         default_factory=CreatorModel,
         title="Create Karma ROP")
-    CreateMantraIFD: CreatorModel = SettingsField(
-        default_factory=CreatorModel,
-        title="Create Mantra IFD")
     CreateMantraROP: CreatorModel = SettingsField(
         default_factory=CreatorModel,
         title="Create Mantra ROP")
@@ -119,10 +116,6 @@ DEFAULT_HOUDINI_CREATE_SETTINGS = {
         "enabled": True,
         "default_variants": ["Main"]
     },
-    "CreateMantraIFD": {
-        "enabled": True,
-        "default_variants": ["Main"]
-    },
     "CreateMantraROP": {
         "enabled": True,
         "default_variants": ["Main"]