Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 13:24:54 +01:00)

Commit be06640b45: "resolve conflict"
380 changed files with 4321 additions and 1735 deletions
@@ -51,8 +51,13 @@ IGNORED_MODULES_IN_AYON = set()
+# - this is used to log the missing addon
+MOVED_ADDON_MILESTONE_VERSIONS = {
+    "applications": VersionInfo(0, 2, 0),
+    "clockify": VersionInfo(0, 2, 0),
+    "traypublisher": VersionInfo(0, 2, 0),
+    "tvpaint": VersionInfo(0, 2, 0),
+    "nuke": VersionInfo(0, 2, 0),
+}


 # Inherit from `object` for Python 2 hosts
 class _ModuleClass(object):
     """Fake module class for storing AYON addons.
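Note: a hypothetical sketch of how the milestone table above could be used; the commit only shows the table itself. The function and argument names are assumed, and `VersionInfo` comparison support is assumed from its use above.

def log_missing_addon(addon_name, addon_version, log):
    # Warn when a bundled addon version predates the milestone at which
    # the addon moved out of ayon-core into its own repository.
    milestone = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name)
    if milestone is not None and addon_version < milestone:
        log.warning(
            "Addon '%s' (%s) moved out of ayon-core at version %s.",
            addon_name, addon_version, milestone,
        )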
@@ -1,7 +1,7 @@
 from ayon_applications import PreLaunchHook

-from ayon_core.pipeline.colorspace import get_imageio_config
-from ayon_core.pipeline.template_data import get_template_data_with_names
+from ayon_core.pipeline.colorspace import get_imageio_config_preset
+from ayon_core.pipeline.template_data import get_template_data


 class OCIOEnvHook(PreLaunchHook):

@@ -26,32 +26,38 @@ class OCIOEnvHook(PreLaunchHook):
     def execute(self):
         """Hook entry method."""

-        template_data = get_template_data_with_names(
-            project_name=self.data["project_name"],
-            folder_path=self.data["folder_path"],
-            task_name=self.data["task_name"],
+        folder_entity = self.data["folder_entity"]
+
+        template_data = get_template_data(
+            self.data["project_entity"],
+            folder_entity=folder_entity,
+            task_entity=self.data["task_entity"],
             host_name=self.host_name,
-            settings=self.data["project_settings"]
+            settings=self.data["project_settings"],
         )

-        config_data = get_imageio_config(
-            project_name=self.data["project_name"],
-            host_name=self.host_name,
-            project_settings=self.data["project_settings"],
-            anatomy_data=template_data,
+        config_data = get_imageio_config_preset(
+            self.data["project_name"],
+            self.data["folder_path"],
+            self.data["task_name"],
+            self.host_name,
+            anatomy=self.data["anatomy"],
+            project_settings=self.data["project_settings"],
+            template_data=template_data,
             env=self.launch_context.env,
+            folder_id=folder_entity["id"],
         )

-        if config_data:
-            ocio_path = config_data["path"]
-
-            if self.host_name in ["nuke", "hiero"]:
-                ocio_path = ocio_path.replace("\\", "/")
-
-            self.log.info(
-                f"Setting OCIO environment to config path: {ocio_path}")
-
-            self.launch_context.env["OCIO"] = ocio_path
-        else:
+        if not config_data:
             self.log.debug("OCIO not set or enabled")
+            return
+
+        ocio_path = config_data["path"]
+
+        if self.host_name in ["nuke", "hiero"]:
+            ocio_path = ocio_path.replace("\\", "/")
+
+        self.log.info(
+            f"Setting OCIO environment to config path: {ocio_path}")
+
+        self.launch_context.env["OCIO"] = ocio_path
@@ -60,7 +60,7 @@ def main(*subprocess_args):
         )
     )

-    elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
+    elif os.environ.get("AVALON_AFTEREFFECTS_WORKFILES_ON_LAUNCH", True):
         save = False
         if os.getenv("WORKFILES_SAVE_AS"):
             save = True
@@ -365,3 +365,62 @@ def maintained_time():
         yield
     finally:
         bpy.context.scene.frame_current = current_time
+
+
+def get_all_parents(obj):
+    """Get all recursive parents of object.
+
+    Arguments:
+        obj (bpy.types.Object): Object to get all parents for.
+
+    Returns:
+        List[bpy.types.Object]: All parents of object
+
+    """
+    result = []
+    while True:
+        obj = obj.parent
+        if not obj:
+            break
+        result.append(obj)
+    return result
+
+
+def get_highest_root(objects):
+    """Get the highest object (the least parents) among the objects.
+
+    If multiple objects have the same amount of parents (or no parents) the
+    first object found in the input iterable will be returned.
+
+    Note that this will *not* return objects outside of the input list, as
+    such it will not return the root of node from a child node. It is purely
+    intended to find the highest object among a list of objects. To instead
+    get the root from one object use, e.g. `get_all_parents(obj)[-1]`
+
+    Arguments:
+        objects (List[bpy.types.Object]): Objects to find the highest root in.
+
+    Returns:
+        Optional[bpy.types.Object]: First highest root found or None if no
+            `bpy.types.Object` found in input list.
+
+    """
+    included_objects = {obj.name_full for obj in objects}
+    num_parents_to_obj = {}
+    for obj in objects:
+        if isinstance(obj, bpy.types.Object):
+            parents = get_all_parents(obj)
+            # included parents
+            parents = [parent for parent in parents if
+                       parent.name_full in included_objects]
+            if not parents:
+                # A node without parents must be a highest root
+                return obj
+
+            num_parents_to_obj.setdefault(len(parents), obj)
+
+    if not num_parents_to_obj:
+        return
+
+    minimum_parent = min(num_parents_to_obj)
+    return num_parents_to_obj[minimum_parent]
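A small usage sketch of the new helpers (assumed scene content, not part of the commit): given a selection where "rig" parents "mesh_child", the highest root is the member with the fewest parents inside the selection.

import bpy

# Assumed demo objects; get_all_parents/get_highest_root as defined above.
selection = [bpy.data.objects["mesh_child"], bpy.data.objects["rig"]]
root = get_highest_root(selection)
if root is not None:
    # "rig" wins because "mesh_child" has one parent inside the selection.
    print("Export root:", root.name)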
@@ -26,7 +26,8 @@ from .ops import (
 )
 from .lib import imprint

-VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"]
+VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx",
+                    ".usd", ".usdc", ".usda"]


 def prepare_scene_name(
client/ayon_core/hosts/blender/plugins/create/create_usd.py (new file, 30 lines)

@@ -0,0 +1,30 @@
+"""Create a USD Export."""
+
+from ayon_core.hosts.blender.api import plugin, lib
+
+
+class CreateUSD(plugin.BaseCreator):
+    """Create USD Export"""
+
+    identifier = "io.openpype.creators.blender.usd"
+    name = "usdMain"
+    label = "USD"
+    product_type = "usd"
+    icon = "gears"
+
+    def create(
+        self, product_name: str, instance_data: dict, pre_create_data: dict
+    ):
+        # Run parent create method
+        collection = super().create(
+            product_name, instance_data, pre_create_data
+        )
+
+        if pre_create_data.get("use_selection"):
+            objects = lib.get_selection()
+            for obj in objects:
+                collection.objects.link(obj)
+                if obj.type == 'EMPTY':
+                    objects.extend(obj.children)
+
+        return collection
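One detail in CreateUSD.create() above deserves a note: the loop appends to `objects` while iterating it, and Python list iteration keeps walking newly appended items, so children of nested empties get linked too. A standalone sketch of the idiom with made-up data:

# Grow-while-iterating idiom used above (assumed demo data).
items = ["a", "b"]
children = {"a": ["a1", "a2"], "a1": ["a1x"]}
for item in items:
    # Appending during iteration is safe for lists; the loop runs
    # until it reaches the real end of the growing list.
    items.extend(children.get(item, []))
print(items)  # ['a', 'b', 'a1', 'a2', 'a1x']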
@@ -26,10 +26,10 @@ class CacheModelLoader(plugin.AssetLoader):
     Note:
         At least for now it only supports Alembic files.
     """
-    product_types = {"model", "pointcache", "animation"}
-    representations = {"abc"}
+    product_types = {"model", "pointcache", "animation", "usd"}
+    representations = {"abc", "usd"}

-    label = "Load Alembic"
+    label = "Load Cache"
     icon = "code-fork"
     color = "orange"

@@ -53,10 +53,21 @@ class CacheModelLoader(plugin.AssetLoader):
         plugin.deselect_all()

         relative = bpy.context.preferences.filepaths.use_relative_paths
-        bpy.ops.wm.alembic_import(
-            filepath=libpath,
-            relative_path=relative
-        )
+
+        if any(libpath.lower().endswith(ext)
+               for ext in [".usd", ".usda", ".usdc"]):
+            # USD
+            bpy.ops.wm.usd_import(
+                filepath=libpath,
+                relative_path=relative
+            )
+
+        else:
+            # Alembic
+            bpy.ops.wm.alembic_import(
+                filepath=libpath,
+                relative_path=relative
+            )

         imported = lib.get_selection()
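The extension check above is inlined again in the USD creator and extractor; a hypothetical helper (not in the commit) could centralize it:

import os

# Hypothetical helper; the commit inlines this check instead.
USD_EXTENSIONS = {".usd", ".usda", ".usdc"}

def is_usd_path(path):
    # splitext keeps the comparison robust against mixed-case suffixes.
    return os.path.splitext(path)[1].lower() in USD_EXTENSIONS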
@@ -43,7 +43,10 @@ class AbcCameraLoader(plugin.AssetLoader):
     def _process(self, libpath, asset_group, group_name):
         plugin.deselect_all()

-        bpy.ops.wm.alembic_import(filepath=libpath)
+        # Force the creation of the transform cache even if the camera
+        # doesn't have an animation. We use the cache to update the camera.
+        bpy.ops.wm.alembic_import(
+            filepath=libpath, always_add_cache_reader=True)

         objects = lib.get_selection()
@@ -178,12 +181,33 @@ class AbcCameraLoader(plugin.AssetLoader):
             self.log.info("Library already loaded, not updating...")
             return

-        mat = asset_group.matrix_basis.copy()
+        for obj in asset_group.children:
+            found = False
+            for constraint in obj.constraints:
+                if constraint.type == "TRANSFORM_CACHE":
+                    constraint.cache_file.filepath = libpath.as_posix()
+                    found = True
+                    break
+            if not found:
+                # This is to keep compatibility with cameras loaded with
+                # the old loader
+                # Create a new constraint for the cache file
+                constraint = obj.constraints.new("TRANSFORM_CACHE")
+                bpy.ops.cachefile.open(filepath=libpath.as_posix())
+                constraint.cache_file = bpy.data.cache_files[-1]
+                constraint.cache_file.scale = 1.0

-        self._remove(asset_group)
-        self._process(str(libpath), asset_group, object_name)
+                # This is a workaround to set the object path. Blender doesn't
+                # load the list of object paths until the object is evaluated.
+                # This is a hack to force the object to be evaluated.
+                # The modifier doesn't need to be removed because camera
+                # objects don't have modifiers.
+                obj.modifiers.new(
+                    name='MeshSequenceCache', type='MESH_SEQUENCE_CACHE')
+                bpy.context.evaluated_depsgraph_get()

-        asset_group.matrix_basis = mat
+                constraint.object_path = (
+                    constraint.cache_file.object_paths[0].path)

         metadata["libpath"] = str(libpath)
         metadata["representation"] = repre_entity["id"]
@@ -12,7 +12,7 @@ class CollectBlenderInstanceData(pyblish.api.InstancePlugin):
     order = pyblish.api.CollectorOrder
     hosts = ["blender"]
     families = ["model", "pointcache", "animation", "rig", "camera", "layout",
-                "blendScene"]
+                "blendScene", "usd"]
     label = "Collect Instance"

     def process(self, instance):
@@ -0,0 +1,90 @@
+import os
+
+import bpy
+
+from ayon_core.pipeline import publish
+from ayon_core.hosts.blender.api import plugin, lib
+
+
+class ExtractUSD(publish.Extractor):
+    """Extract as USD."""
+
+    label = "Extract USD"
+    hosts = ["blender"]
+    families = ["usd"]
+
+    def process(self, instance):
+
+        # Ignore runtime instances (e.g. USD layers)
+        # TODO: This is better done via more specific `families`
+        if not instance.data.get("transientData", {}).get("instance_node"):
+            return
+
+        # Define extract output file path
+        stagingdir = self.staging_dir(instance)
+        filename = f"{instance.name}.usd"
+        filepath = os.path.join(stagingdir, filename)
+
+        # Perform extraction
+        self.log.debug("Performing extraction..")
+
+        # Select all members to "export selected"
+        plugin.deselect_all()
+
+        selected = []
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object):
+                obj.select_set(True)
+                selected.append(obj)
+
+        root = lib.get_highest_root(objects=instance[:])
+        if not root:
+            instance_node = instance.data["transientData"]["instance_node"]
+            raise publish.KnownPublishError(
+                f"No root object found in instance: {instance_node.name}"
+            )
+        self.log.debug(f"Exporting using active root: {root.name}")
+
+        context = plugin.create_blender_context(
+            active=root, selected=selected)
+
+        # Export USD
+        with bpy.context.temp_override(**context):
+            bpy.ops.wm.usd_export(
+                filepath=filepath,
+                selected_objects_only=True,
+                export_textures=False,
+                relative_paths=False,
+                export_animation=False,
+                export_hair=False,
+                export_uvmaps=True,
+                # TODO: add for new version of Blender (4+?)
+                # export_mesh_colors=True,
+                export_normals=True,
+                export_materials=True,
+                use_instancing=True
+            )
+
+        plugin.deselect_all()
+
+        # Add representation
+        representation = {
+            'name': 'usd',
+            'ext': 'usd',
+            'files': filename,
+            "stagingDir": stagingdir,
+        }
+        instance.data.setdefault("representations", []).append(representation)
+        self.log.debug("Extracted instance '%s' to: %s",
+                       instance.name, representation)
+
+
+class ExtractModelUSD(ExtractUSD):
+    """Extract model as USD."""
+
+    label = "Extract USD (Model)"
+    hosts = ["blender"]
+    families = ["model"]
+
+    # Driven by settings
+    optional = True
@@ -58,3 +58,55 @@ class SelectInvalidAction(pyblish.api.Action):
         self.log.info(
             "Selecting invalid tools: %s" % ", ".join(sorted(names))
         )
+
+
+class SelectToolAction(pyblish.api.Action):
+    """Select invalid output tool in Fusion when plug-in failed.
+
+    """
+
+    label = "Select saver"
+    on = "failed"  # This action is only available on a failed plug-in
+    icon = "search"  # Icon from Awesome Icon
+
+    def process(self, context, plugin):
+        errored_instances = get_errored_instances_from_context(
+            context,
+            plugin=plugin,
+        )
+
+        # Get the invalid nodes for the plug-ins
+        self.log.info("Finding invalid nodes..")
+        tools = []
+        for instance in errored_instances:
+
+            tool = instance.data.get("tool")
+            if tool is not None:
+                tools.append(tool)
+            else:
+                self.log.warning(
+                    "Plug-in returned to be invalid, "
+                    f"but has no saver for instance {instance.name}."
+                )
+
+        if not tools:
+            # Assume relevant comp is current comp and clear selection
+            self.log.info("No invalid tools found.")
+            comp = get_current_comp()
+            flow = comp.CurrentFrame.FlowView
+            flow.Select()  # No args equals clearing selection
+            return
+
+        # Assume a single comp
+        first_tool = tools[0]
+        comp = first_tool.Comp()
+        flow = comp.CurrentFrame.FlowView
+        flow.Select()  # No args equals clearing selection
+        names = set()
+        for tool in tools:
+            flow.Select(tool, True)
+            comp.SetActiveTool(tool)
+            names.add(tool.Name)
+        self.log.info(
+            "Selecting invalid tools: %s" % ", ".join(sorted(names))
+        )
@@ -52,7 +52,7 @@ class CollectFusionRender(
             if product_type not in ["render", "image"]:
                 continue

-            task_name = context.data["task"]
+            task_name = inst.data["task"]
             tool = inst.data["transientData"]["tool"]

             instance_families = inst.data.get("families", [])
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+"""Validate if instance context is the same as publish context."""
+
+import pyblish.api
+from ayon_core.hosts.fusion.api.action import SelectToolAction
+from ayon_core.pipeline.publish import (
+    RepairAction,
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
+)
+
+
+class ValidateInstanceInContextFusion(pyblish.api.InstancePlugin,
+                                      OptionalPyblishPluginMixin):
+    """Validator to check if instance context matches context of publish.
+
+    When working in per-shot style you always publish data in context of
+    current asset (shot). This validator checks if this is so. It is optional
+    so it can be disabled when needed.
+    """
+    # Similar to maya and houdini-equivalent `ValidateInstanceInContext`
+
+    order = ValidateContentsOrder
+    label = "Instance in same Context"
+    optional = True
+    hosts = ["fusion"]
+    actions = [SelectToolAction, RepairAction]
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
+        instance_context = self.get_context(instance.data)
+        context = self.get_context(instance.context.data)
+        if instance_context != context:
+            context_label = "{} > {}".format(*context)
+            instance_label = "{} > {}".format(*instance_context)
+
+            raise PublishValidationError(
+                message=(
+                    "Instance '{}' publishes to different asset than current "
+                    "context: {}. Current context: {}".format(
+                        instance.name, instance_label, context_label
+                    )
+                ),
+                description=(
+                    "## Publishing to a different asset\n"
+                    "There are publish instances present which are publishing "
+                    "into a different asset than your current context.\n\n"
+                    "Usually this is not what you want but there can be cases "
+                    "where you might want to publish into another asset or "
+                    "shot. If that's the case you can disable the validation "
+                    "on the instance to ignore it."
+                )
+            )
+
+    @classmethod
+    def repair(cls, instance):
+
+        create_context = instance.context.data["create_context"]
+        instance_id = instance.data.get("instance_id")
+        created_instance = create_context.get_instance_by_id(
+            instance_id
+        )
+        if created_instance is None:
+            raise RuntimeError(
+                f"No CreatedInstances found with id '{instance_id} "
+                f"in {create_context.instances_by_id}"
+            )
+
+        context_asset, context_task = cls.get_context(instance.context.data)
+        created_instance["folderPath"] = context_asset
+        created_instance["task"] = context_task
+        create_context.save_changes()
+
+    @staticmethod
+    def get_context(data):
+        """Return asset, task from publishing context data"""
+        return data["folderPath"], data["task"]
@@ -1110,10 +1110,7 @@ def apply_colorspace_project():
    '''
    # backward compatibility layer
    # TODO: remove this after some time
-    config_data = get_imageio_config(
-        project_name=get_current_project_name(),
-        host_name="hiero"
-    )
+    config_data = get_current_context_imageio_config_preset()

    if config_data:
        presets.update({
@@ -13,11 +13,17 @@ class CreateArnoldRop(plugin.HoudiniCreator):
     # Default extension
     ext = "exr"

-    # Default to split export and render jobs
-    export_job = True
+    # Default render target
+    render_target = "farm_split"

     def create(self, product_name, instance_data, pre_create_data):
         import hou
+        # Transfer settings from pre create to instance
+        creator_attributes = instance_data.setdefault(
+            "creator_attributes", dict())
+        for key in ["render_target", "review"]:
+            if key in pre_create_data:
+                creator_attributes[key] = pre_create_data[key]

         # Remove the active, we are checking the bypass flag of the nodes
         instance_data.pop("active", None)

@@ -25,8 +31,6 @@ class CreateArnoldRop(plugin.HoudiniCreator):

         # Add chunk size attribute
         instance_data["chunkSize"] = 1
-        # Submit for job publishing
-        instance_data["farm"] = pre_create_data.get("farm")

         instance = super(CreateArnoldRop, self).create(
             product_name,

@@ -51,7 +55,7 @@ class CreateArnoldRop(plugin.HoudiniCreator):
             "ar_exr_half_precision": 1  # half precision
         }

-        if pre_create_data.get("export_job"):
+        if pre_create_data.get("render_target") == "farm_split":
             ass_filepath = \
                 "{export_dir}{product_name}/{product_name}.$F4.ass".format(
                     export_dir=hou.text.expandString("$HIP/pyblish/ass/"),

@@ -66,23 +70,41 @@ class CreateArnoldRop(plugin.HoudiniCreator):
         to_lock = ["productType", "id"]
         self.lock_parameters(instance_node, to_lock)

-    def get_pre_create_attr_defs(self):
-        attrs = super(CreateArnoldRop, self).get_pre_create_attr_defs()
+    def get_instance_attr_defs(self):
+        """get instance attribute definitions.
+
+        Attributes defined in this method are exposed in
+        publish tab in the publisher UI.
+        """
+
+        render_target_items = {
+            "local": "Local machine rendering",
+            "local_no_render": "Use existing frames (local)",
+            "farm": "Farm Rendering",
+            "farm_split": "Farm Rendering - Split export & render jobs",
+        }
+
+        return [
+            BoolDef("review",
+                    label="Review",
+                    tooltip="Mark as reviewable",
+                    default=True),
+            EnumDef("render_target",
+                    items=render_target_items,
+                    label="Render target",
+                    default=self.render_target),
+        ]
+
+    def get_pre_create_attr_defs(self):
         image_format_enum = [
             "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
             "rad", "rat", "rta", "sgi", "tga", "tif",
         ]

-        return attrs + [
-            BoolDef("farm",
-                    label="Submitting to Farm",
-                    default=True),
-            BoolDef("export_job",
-                    label="Split export and render jobs",
-                    default=self.export_job),
+        attrs = [
             EnumDef("image_format",
                     image_format_enum,
                     default=self.ext,
-                    label="Image Format Options")
+                    label="Image Format Options"),
         ]
+        return attrs + self.get_instance_attr_defs()
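The same refactor repeats in the Karma, Mantra, Redshift, and V-Ray creators below: the old `farm`/`export_job` booleans collapse into one `render_target` enum that lives on instance attributes (so it stays editable in the publisher UI) and is only seeded from the pre-create dialog. A condensed sketch of the pattern, assuming `plugin.HoudiniCreator`, `BoolDef`, and `EnumDef` as used in the diff; the class itself is illustrative, not part of the commit:

class ExampleRenderCreator(plugin.HoudiniCreator):
    # Class-level default, overridable from settings.
    render_target = "farm_split"

    def create(self, product_name, instance_data, pre_create_data):
        # Seed instance attributes from the pre-create dialog.
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        for key in ["render_target", "review"]:
            if key in pre_create_data:
                creator_attributes[key] = pre_create_data[key]
        return super().create(product_name, instance_data, pre_create_data)

    def get_instance_attr_defs(self):
        return [
            BoolDef("review", label="Review", default=True),
            EnumDef("render_target",
                    items={"local": "Local machine rendering",
                           "farm": "Farm Rendering"},
                    label="Render target",
                    default=self.render_target),
        ]

    def get_pre_create_attr_defs(self):
        # Expose the same options up front at create time.
        return super().get_pre_create_attr_defs() + self.get_instance_attr_defs()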
@@ -11,15 +11,23 @@ class CreateKarmaROP(plugin.HoudiniCreator):
     product_type = "karma_rop"
     icon = "magic"

+    # Default render target
+    render_target = "farm"
+
     def create(self, product_name, instance_data, pre_create_data):
         import hou  # noqa
+        # Transfer settings from pre create to instance
+        creator_attributes = instance_data.setdefault(
+            "creator_attributes", dict())
+
+        for key in ["render_target", "review"]:
+            if key in pre_create_data:
+                creator_attributes[key] = pre_create_data[key]
+
         instance_data.pop("active", None)
         instance_data.update({"node_type": "karma"})
         # Add chunk size attribute
         instance_data["chunkSize"] = 10
-        # Submit for job publishing
-        instance_data["farm"] = pre_create_data.get("farm")

         instance = super(CreateKarmaROP, self).create(
             product_name,

@@ -86,18 +94,40 @@ class CreateKarmaROP(plugin.HoudiniCreator):
         to_lock = ["productType", "id"]
         self.lock_parameters(instance_node, to_lock)

-    def get_pre_create_attr_defs(self):
-        attrs = super(CreateKarmaROP, self).get_pre_create_attr_defs()
+    def get_instance_attr_defs(self):
+        """get instance attribute definitions.
+
+        Attributes defined in this method are exposed in
+        publish tab in the publisher UI.
+        """
+
+        render_target_items = {
+            "local": "Local machine rendering",
+            "local_no_render": "Use existing frames (local)",
+            "farm": "Farm Rendering",
+        }
+
+        return [
+            BoolDef("review",
+                    label="Review",
+                    tooltip="Mark as reviewable",
+                    default=True),
+            EnumDef("render_target",
+                    items=render_target_items,
+                    label="Render target",
+                    default=self.render_target)
+        ]
+
+    def get_pre_create_attr_defs(self):
         image_format_enum = [
             "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
             "rad", "rat", "rta", "sgi", "tga", "tif",
         ]

-        return attrs + [
-            BoolDef("farm",
-                    label="Submitting to Farm",
-                    default=True),
+        attrs = super(CreateKarmaROP, self).get_pre_create_attr_defs()
+
+        attrs += [
             EnumDef("image_format",
                     image_format_enum,
                     default="exr",

@@ -112,5 +142,6 @@ class CreateKarmaROP(plugin.HoudiniCreator):
                     decimals=0),
             BoolDef("cam_res",
                     label="Camera Resolution",
-                    default=False)
+                    default=False),
         ]
+        return attrs + self.get_instance_attr_defs()
@@ -11,18 +11,22 @@ class CreateMantraROP(plugin.HoudiniCreator):
     product_type = "mantra_rop"
     icon = "magic"

-    # Default to split export and render jobs
-    export_job = True
+    # Default render target
+    render_target = "farm_split"

     def create(self, product_name, instance_data, pre_create_data):
         import hou  # noqa
+        # Transfer settings from pre create to instance
+        creator_attributes = instance_data.setdefault(
+            "creator_attributes", dict())
+        for key in ["render_target", "review"]:
+            if key in pre_create_data:
+                creator_attributes[key] = pre_create_data[key]

         instance_data.pop("active", None)
         instance_data.update({"node_type": "ifd"})
         # Add chunk size attribute
         instance_data["chunkSize"] = 10
-        # Submit for job publishing
-        instance_data["farm"] = pre_create_data.get("farm")

         instance = super(CreateMantraROP, self).create(
             product_name,

@@ -46,7 +50,7 @@ class CreateMantraROP(plugin.HoudiniCreator):
             "vm_picture": filepath,
         }

-        if pre_create_data.get("export_job"):
+        if pre_create_data.get("render_target") == "farm_split":
             ifd_filepath = \
                 "{export_dir}{product_name}/{product_name}.$F4.ifd".format(
                     export_dir=hou.text.expandString("$HIP/pyblish/ifd/"),

@@ -77,21 +81,40 @@ class CreateMantraROP(plugin.HoudiniCreator):
         to_lock = ["productType", "id"]
         self.lock_parameters(instance_node, to_lock)

-    def get_pre_create_attr_defs(self):
-        attrs = super(CreateMantraROP, self).get_pre_create_attr_defs()
+    def get_instance_attr_defs(self):
+        """get instance attribute definitions.
+
+        Attributes defined in this method are exposed in
+        publish tab in the publisher UI.
+        """
+
+        render_target_items = {
+            "local": "Local machine rendering",
+            "local_no_render": "Use existing frames (local)",
+            "farm": "Farm Rendering",
+            "farm_split": "Farm Rendering - Split export & render jobs",
+        }
+
+        return [
+            BoolDef("review",
+                    label="Review",
+                    tooltip="Mark as reviewable",
+                    default=True),
+            EnumDef("render_target",
+                    items=render_target_items,
+                    label="Render target",
+                    default=self.render_target)
+        ]
+
+    def get_pre_create_attr_defs(self):
         image_format_enum = [
             "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
             "rad", "rat", "rta", "sgi", "tga", "tif",
         ]

-        return attrs + [
-            BoolDef("farm",
-                    label="Submitting to Farm",
-                    default=True),
-            BoolDef("export_job",
-                    label="Split export and render jobs",
-                    default=self.export_job),
+        attrs = super(CreateMantraROP, self).get_pre_create_attr_defs()
+
+        attrs += [
             EnumDef("image_format",
                     image_format_enum,
                     default="exr",

@@ -100,5 +123,6 @@ class CreateMantraROP(plugin.HoudiniCreator):
                     label="Override Camera Resolution",
                     tooltip="Override the current camera "
                             "resolution, recommended for IPR.",
-                    default=False)
+                    default=False),
         ]
+        return attrs + self.get_instance_attr_defs()
client/ayon_core/hosts/houdini/plugins/create/create_model.py (new file, 141 lines)

@@ -0,0 +1,141 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating Model product type.
+
+Note:
+    Currently, this creator plugin is the same as 'create_pointcache.py',
+    but renames the product type to 'model'.
+
+    Its purpose is to support
+    Maya (load/publish model from maya to/from houdini).
+
+    It's considered to support multiple representations in the future.
+"""
+from ayon_core.hosts.houdini.api import plugin
+from ayon_core.lib import BoolDef
+
+import hou
+
+
+class CreateModel(plugin.HoudiniCreator):
+    """Create Model"""
+    identifier = "io.openpype.creators.houdini.model"
+    label = "Model"
+    product_type = "model"
+    icon = "cube"
+
+    def create(self, product_name, instance_data, pre_create_data):
+        instance_data.pop("active", None)
+        instance_data.update({"node_type": "alembic"})
+        creator_attributes = instance_data.setdefault(
+            "creator_attributes", dict())
+        creator_attributes["farm"] = pre_create_data["farm"]
+
+        instance = super(CreateModel, self).create(
+            product_name,
+            instance_data,
+            pre_create_data)
+
+        instance_node = hou.node(instance.get("instance_node"))
+        parms = {
+            "use_sop_path": True,
+            "build_from_path": True,
+            "path_attrib": "path",
+            "prim_to_detail_pattern": "cbId",
+            "format": 2,
+            "facesets": 0,
+            "filename": hou.text.expandString(
+                "$HIP/pyblish/{}.abc".format(product_name))
+        }
+
+        if self.selected_nodes:
+            selected_node = self.selected_nodes[0]
+
+            # Although Houdini allows ObjNode path on `sop_path` for the
+            # ROP node, we prefer it set to the SopNode path explicitly.
+
+            # Allow sop level paths (e.g. /obj/geo1/box1)
+            if isinstance(selected_node, hou.SopNode):
+                parms["sop_path"] = selected_node.path()
+                self.log.debug(
+                    "Valid SopNode selection, 'SOP Path' in ROP will be set to '%s'."
+                    % selected_node.path()
+                )
+
+            # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
+            # but do not allow other object level node types like cameras, etc.
+            elif isinstance(selected_node, hou.ObjNode) and \
+                    selected_node.type().name() in ["geo"]:
+
+                # get the output node with the minimum
+                # 'outputidx' or the node with display flag
+                sop_path = self.get_obj_output(selected_node)
+
+                if sop_path:
+                    parms["sop_path"] = sop_path.path()
+                    self.log.debug(
+                        "Valid ObjNode selection, 'SOP Path' in ROP will be set to "
+                        "the child path '%s'."
+                        % sop_path.path()
+                    )
+
+            if not parms.get("sop_path", None):
+                self.log.debug(
+                    "Selection isn't valid. 'SOP Path' in ROP will be empty."
+                )
+        else:
+            self.log.debug(
+                "No Selection. 'SOP Path' in ROP will be empty."
+            )
+
+        instance_node.setParms(parms)
+        instance_node.parm("trange").set(1)

+        # Explicitly set f1 and f2 to frame start.
+        # Which forces the rop node to export one frame.
+        instance_node.parmTuple('f').deleteAllKeyframes()
+        fstart = int(hou.hscriptExpression("$FSTART"))
+        instance_node.parmTuple('f').set((fstart, fstart, 1))
+
+        # Lock any parameters in this list
+        to_lock = ["prim_to_detail_pattern"]
+        self.lock_parameters(instance_node, to_lock)
+
+    def get_network_categories(self):
+        return [
+            hou.ropNodeTypeCategory(),
+            hou.sopNodeTypeCategory()
+        ]
+
+    def get_obj_output(self, obj_node):
+        """Find output node with the smallest 'outputidx'."""
+
+        outputs = obj_node.subnetOutputs()
+
+        # if obj_node is empty
+        if not outputs:
+            return
+
+        # if obj_node has one output child, whether it's a sop output
+        # node or a node with the render flag
+        elif len(outputs) == 1:
+            return outputs[0]
+
+        # if there is more than one, the node has multiple output nodes;
+        # return the one with the minimum 'outputidx'
+        else:
+            return min(outputs,
+                       key=lambda node: node.evalParm('outputidx'))
+
+    def get_instance_attr_defs(self):
+        return [
+            BoolDef("farm",
+                    label="Submitting to Farm",
+                    default=False)
+        ]
+
+    def get_pre_create_attr_defs(self):
+        attrs = super().get_pre_create_attr_defs()
+        # Use same attributes as for instance attributes
+        return attrs + self.get_instance_attr_defs()
@@ -17,17 +17,21 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
     ext = "exr"
     multi_layered_mode = "No Multi-Layered EXR File"

-    # Default to split export and render jobs
-    split_render = True
+    # Default render target
+    render_target = "farm_split"

     def create(self, product_name, instance_data, pre_create_data):
+        # Transfer settings from pre create to instance
+        creator_attributes = instance_data.setdefault(
+            "creator_attributes", dict())
+        for key in ["render_target", "review"]:
+            if key in pre_create_data:
+                creator_attributes[key] = pre_create_data[key]
+
         instance_data.pop("active", None)
         instance_data.update({"node_type": "Redshift_ROP"})
         # Add chunk size attribute
         instance_data["chunkSize"] = 10
-        # Submit for job publishing
-        instance_data["farm"] = pre_create_data.get("farm")

         instance = super(CreateRedshiftROP, self).create(
             product_name,

@@ -99,7 +103,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
         rs_filepath = f"{export_dir}{product_name}/{product_name}.$F4.rs"
         parms["RS_archive_file"] = rs_filepath

-        if pre_create_data.get("split_render", self.split_render):
+        if pre_create_data.get("render_target") == "farm_split":
             parms["RS_archive_enable"] = 1

         instance_node.setParms(parms)

@@ -118,24 +122,44 @@ class CreateRedshiftROP(plugin.HoudiniCreator):

         return super(CreateRedshiftROP, self).remove_instances(instances)

+    def get_instance_attr_defs(self):
+        """get instance attribute definitions.
+
+        Attributes defined in this method are exposed in
+        publish tab in the publisher UI.
+        """
+
+        render_target_items = {
+            "local": "Local machine rendering",
+            "local_no_render": "Use existing frames (local)",
+            "farm": "Farm Rendering",
+            "farm_split": "Farm Rendering - Split export & render jobs",
+        }
+
+        return [
+            BoolDef("review",
+                    label="Review",
+                    tooltip="Mark as reviewable",
+                    default=True),
+            EnumDef("render_target",
+                    items=render_target_items,
+                    label="Render target",
+                    default=self.render_target)
+        ]
+
     def get_pre_create_attr_defs(self):
-        attrs = super(CreateRedshiftROP, self).get_pre_create_attr_defs()

         image_format_enum = [
             "exr", "tif", "jpg", "png",
         ]

         multi_layered_mode = [
             "No Multi-Layered EXR File",
             "Full Multi-Layered EXR File"
         ]

-        return attrs + [
-            BoolDef("farm",
-                    label="Submitting to Farm",
-                    default=True),
-            BoolDef("split_render",
-                    label="Split export and render jobs",
-                    default=self.split_render),
+        attrs = super(CreateRedshiftROP, self).get_pre_create_attr_defs()
+        attrs += [
             EnumDef("image_format",
                     image_format_enum,
                     default=self.ext,

@@ -143,5 +167,6 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
             EnumDef("multi_layered_mode",
                     multi_layered_mode,
                     default=self.multi_layered_mode,
-                    label="Multi-Layered EXR")
+                    label="Multi-Layered EXR"),
         ]
+        return attrs + self.get_instance_attr_defs()
@@ -16,17 +16,21 @@ class CreateVrayROP(plugin.HoudiniCreator):
     icon = "magic"
     ext = "exr"

-    # Default to split export and render jobs
-    export_job = True
+    # Default render target
+    render_target = "farm_split"

     def create(self, product_name, instance_data, pre_create_data):
+        # Transfer settings from pre create to instance
+        creator_attributes = instance_data.setdefault(
+            "creator_attributes", dict())
+        for key in ["render_target", "review"]:
+            if key in pre_create_data:
+                creator_attributes[key] = pre_create_data[key]
+
         instance_data.pop("active", None)
         instance_data.update({"node_type": "vray_renderer"})
         # Add chunk size attribute
         instance_data["chunkSize"] = 10
-        # Submit for job publishing
-        instance_data["farm"] = pre_create_data.get("farm")

         instance = super(CreateVrayROP, self).create(
             product_name,

@@ -55,7 +59,7 @@ class CreateVrayROP(plugin.HoudiniCreator):
             "SettingsEXR_bits_per_channel": "16"  # half precision
         }

-        if pre_create_data.get("export_job"):
+        if pre_create_data.get("render_target") == "farm_split":
             scene_filepath = \
                 "{export_dir}{product_name}/{product_name}.$F4.vrscene".format(
                     export_dir=hou.text.expandString("$HIP/pyblish/vrscene/"),

@@ -143,20 +147,41 @@ class CreateVrayROP(plugin.HoudiniCreator):

         return super(CreateVrayROP, self).remove_instances(instances)

+    def get_instance_attr_defs(self):
+        """get instance attribute definitions.
+
+        Attributes defined in this method are exposed in
+        publish tab in the publisher UI.
+        """
+
+        render_target_items = {
+            "local": "Local machine rendering",
+            "local_no_render": "Use existing frames (local)",
+            "farm": "Farm Rendering",
+            "farm_split": "Farm Rendering - Split export & render jobs",
+        }
+
+        return [
+            BoolDef("review",
+                    label="Review",
+                    tooltip="Mark as reviewable",
+                    default=True),
+            EnumDef("render_target",
+                    items=render_target_items,
+                    label="Render target",
+                    default=self.render_target)
+        ]
+
     def get_pre_create_attr_defs(self):
-        attrs = super(CreateVrayROP, self).get_pre_create_attr_defs()
         image_format_enum = [
             "bmp", "cin", "exr", "jpg", "pic", "pic.gz", "png",
             "rad", "rat", "rta", "sgi", "tga", "tif",
         ]

-        return attrs + [
-            BoolDef("farm",
-                    label="Submitting to Farm",
-                    default=True),
-            BoolDef("export_job",
-                    label="Split export and render jobs",
-                    default=self.export_job),
+        attrs = super(CreateVrayROP, self).get_pre_create_attr_defs()
+
+        attrs += [
             EnumDef("image_format",
                     image_format_enum,
                     default=self.ext,

@@ -172,3 +197,4 @@ class CreateVrayROP(plugin.HoudiniCreator):
                             "if enabled",
                     default=False)
         ]
+        return attrs + self.get_instance_attr_defs()
@@ -95,7 +95,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator):
         # write workfile information to context container.
         op_ctx = hou.node(CONTEXT_CONTAINER)
         if not op_ctx:
-            op_ctx = self.create_context_node()
+            op_ctx = self.host.create_context_node()

         workfile_data = {"workfile": current_instance.data_to_store()}
         imprint(op_ctx, workfile_data)
@@ -40,12 +40,9 @@ class CollectArnoldROPRenderProducts(pyblish.api.InstancePlugin):
         default_prefix = evalParmNoFrame(rop, "ar_picture")
         render_products = []

-        # Store whether we are splitting the render job (export + render)
-        split_render = bool(rop.parm("ar_ass_export_enable").eval())
-        instance.data["splitRender"] = split_render
         export_prefix = None
         export_products = []
-        if split_render:
+        if instance.data["splitRender"]:
             export_prefix = evalParmNoFrame(
                 rop, "ar_ass_file", pad_character="0"
             )

@@ -68,7 +65,12 @@ class CollectArnoldROPRenderProducts(pyblish.api.InstancePlugin):
             "": self.generate_expected_files(instance, beauty_product)
         }

+        # Assume it's a multipartExr Render.
+        multipartExr = True
+
         num_aovs = rop.evalParm("ar_aovs")
+        # TODO: Check the following logic.
+        # as it always assumes that all AOV are not merged.
         for index in range(1, num_aovs + 1):
             # Skip disabled AOVs
             if not rop.evalParm("ar_enable_aov{}".format(index)):

@@ -85,6 +87,14 @@ class CollectArnoldROPRenderProducts(pyblish.api.InstancePlugin):
                 files_by_aov[label] = self.generate_expected_files(instance,
                                                                    aov_product)

+                # Set to False as soon as we have a separated aov.
+                multipartExr = False
+
+        # Review Logic expects this key to exist and be True
+        # if render is a multipart Exr.
+        # As long as we have one AOV then multipartExr should be True.
+        instance.data["multipartExr"] = multipartExr
+
         for product in render_products:
             self.log.debug("Found render product: {}".format(product))
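The multipartExr bookkeeping introduced here (and mirrored in the Mantra, Redshift, and V-Ray collectors below) reduces to one rule: the render counts as a multi-part EXR only while no AOV is written to its own file. A distilled sketch with assumed inputs:

# Distilled rule; `separate_aov_products` stands in for the per-renderer
# AOV discovery each collector performs.
def is_multipart_exr(separate_aov_products):
    # Assume multi-part until any AOV produces a separate product.
    return not separate_aov_products

print(is_multipart_exr([]))                       # True
print(is_multipart_exr(["diffuse", "specular"]))  # False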
@@ -11,7 +11,7 @@ class CollectDataforCache(pyblish.api.InstancePlugin):
     order = pyblish.api.CollectorOrder + 0.11
     families = ["ass", "pointcache",
                 "mantraifd", "redshiftproxy",
-                "vdbcache"]
+                "vdbcache", "model"]
     hosts = ["houdini"]
     targets = ["local", "remote"]
     label = "Collect Data for Cache"

@@ -43,10 +43,7 @@ class CollectDataforCache(pyblish.api.InstancePlugin):
         cache_files = {"_": instance.data["files"]}
-        # Convert instance family to pointcache if it is bgeo or abc
-        # because ???
-        for family in instance.data["families"]:
-            if family == "bgeo" or "abc":
-                instance.data["productType"] = "pointcache"
-                break
+        self.log.debug(instance.data["families"])
         instance.data.update({
             "plugin": "Houdini",
             "publish": True
@@ -10,7 +10,7 @@ class CollectChunkSize(pyblish.api.InstancePlugin,
     order = pyblish.api.CollectorOrder + 0.05
     families = ["ass", "pointcache",
                 "vdbcache", "mantraifd",
-                "redshiftproxy"]
+                "redshiftproxy", "model"]
     hosts = ["houdini"]
     targets = ["local", "remote"]
     label = "Collect Chunk Size"
@@ -0,0 +1,35 @@
+import pyblish.api
+
+
+class CollectFarmInstances(pyblish.api.InstancePlugin):
+    """Collect instances for farm render."""
+
+    order = pyblish.api.CollectorOrder
+    families = ["mantra_rop",
+                "karma_rop",
+                "redshift_rop",
+                "arnold_rop",
+                "vray_rop"]
+
+    hosts = ["houdini"]
+    targets = ["local", "remote"]
+    label = "Collect farm instances"
+
+    def process(self, instance):
+
+        creator_attribute = instance.data["creator_attributes"]
+
+        # Collect Render Target
+        if creator_attribute.get("render_target") not in {
+            "farm_split", "farm"
+        }:
+            instance.data["farm"] = False
+            instance.data["splitRender"] = False
+            self.log.debug("Render on farm is disabled. "
+                           "Skipping farm collecting.")
+            return
+
+        instance.data["farm"] = True
+        instance.data["splitRender"] = (
+            creator_attribute.get("render_target") == "farm_split"
+        )
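The collector above reduces `render_target` to the two booleans consumed downstream (`farm` by farm submission, `splitRender` by the render-product collectors). Restated as a lookup table with the same observable behavior (the table itself is not in the commit):

RENDER_TARGET_FLAGS = {
    "local":           {"farm": False, "splitRender": False},
    "local_no_render": {"farm": False, "splitRender": False},
    "farm":            {"farm": True,  "splitRender": False},
    "farm_split":      {"farm": True,  "splitRender": True},
}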
@@ -1,21 +1,24 @@
-"""Collector for pointcache types.
+"""Collector for different types.

-This will add additional family to pointcache instance based on
+This will add additional families to different instance based on
 the creator_identifier parameter.
 """
 import pyblish.api


 class CollectPointcacheType(pyblish.api.InstancePlugin):
-    """Collect data type for pointcache instance."""
+    """Collect data type for different instances."""

     order = pyblish.api.CollectorOrder
     hosts = ["houdini"]
-    families = ["pointcache"]
-    label = "Collect type of pointcache"
+    families = ["pointcache", "model"]
+    label = "Collect instances types"

     def process(self, instance):
         if instance.data["creator_identifier"] == "io.openpype.creators.houdini.bgeo":  # noqa: E501
             instance.data["families"] += ["bgeo"]
-        elif instance.data["creator_identifier"] == "io.openpype.creators.houdini.pointcache":  # noqa: E501
+        elif instance.data["creator_identifier"] in {
+            "io.openpype.creators.houdini.pointcache",
+            "io.openpype.creators.houdini.model"
+        }:
             instance.data["families"] += ["abc"]
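As more creators feed this plugin, the if/elif chain could grow; a hypothetical table-driven equivalent of the logic above (not in the commit):

EXTRA_FAMILIES_BY_CREATOR = {
    "io.openpype.creators.houdini.bgeo": ["bgeo"],
    "io.openpype.creators.houdini.pointcache": ["abc"],
    "io.openpype.creators.houdini.model": ["abc"],
}

def extra_families(creator_identifier):
    return EXTRA_FAMILIES_BY_CREATOR.get(creator_identifier, [])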
@@ -55,6 +55,12 @@ class CollectKarmaROPRenderProducts(pyblish.api.InstancePlugin):
                                                      beauty_product)
         }

+        # Review Logic expects this key to exist and be True
+        # if render is a multipart Exr.
+        # As long as we have one AOV then multipartExr should be True.
+        # By default karma render is a multipart Exr.
+        instance.data["multipartExr"] = True
+
         filenames = list(render_products)
         instance.data["files"] = filenames
         instance.data["renderProducts"] = colorspace.ARenderProduct()
@@ -0,0 +1,137 @@
+import os
+import pyblish.api
+from ayon_core.pipeline.create import get_product_name
+from ayon_core.pipeline.farm.patterning import match_aov_pattern
+from ayon_core.pipeline.publish import (
+    get_plugin_settings,
+    apply_plugin_settings_automatically
+)
+
+
+class CollectLocalRenderInstances(pyblish.api.InstancePlugin):
+    """Collect instances for local render.
+
+    Agnostic Local Render Collector.
+    """
+
+    # this plugin runs after Collect Render Products
+    order = pyblish.api.CollectorOrder + 0.12
+    families = ["mantra_rop",
+                "karma_rop",
+                "redshift_rop",
+                "arnold_rop",
+                "vray_rop"]
+
+    hosts = ["houdini"]
+    label = "Collect local render instances"
+
+    use_deadline_aov_filter = False
+    aov_filter = {"host_name": "houdini",
+                  "value": [".*([Bb]eauty).*"]}
+
+    @classmethod
+    def apply_settings(cls, project_settings):
+        # Preserve automatic settings applying logic
+        settings = get_plugin_settings(plugin=cls,
+                                       project_settings=project_settings,
+                                       log=cls.log,
+                                       category="houdini")
+        apply_plugin_settings_automatically(cls, settings, logger=cls.log)
+
+        if not cls.use_deadline_aov_filter:
+            # get aov_filter from collector settings
+            # and restructure it as match_aov_pattern requires.
+            cls.aov_filter = {
+                cls.aov_filter["host_name"]: cls.aov_filter["value"]
+            }
+        else:
+            # get aov_filter from deadline settings
+            cls.aov_filter = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"]["aov_filter"]
+            cls.aov_filter = {
+                item["name"]: item["value"]
+                for item in cls.aov_filter
+            }
+
+    def process(self, instance):
+
+        if instance.data["farm"]:
+            self.log.debug("Render on farm is enabled. "
+                           "Skipping local render collecting.")
+            return
+
+        # Create Instance for each AOV.
+        context = instance.context
+        expectedFiles = next(iter(instance.data["expectedFiles"]), {})
+
+        product_type = "render"  # is always render
+        product_group = get_product_name(
+            context.data["projectName"],
+            context.data["taskEntity"]["name"],
+            context.data["taskEntity"]["taskType"],
+            context.data["hostName"],
+            product_type,
+            instance.data["productName"]
+        )
+
+        for aov_name, aov_filepaths in expectedFiles.items():
+            product_name = product_group
+
+            if aov_name:
+                product_name = "{}_{}".format(product_name, aov_name)
+
+            # Create instance for each AOV
+            aov_instance = context.create_instance(product_name)
+
+            # Prepare Representation for each AOV
+            aov_filenames = [os.path.basename(path) for path in aov_filepaths]
+            staging_dir = os.path.dirname(aov_filepaths[0])
+            ext = aov_filepaths[0].split(".")[-1]
+
+            # Decide if instance is reviewable
+            preview = False
+            if instance.data.get("multipartExr", False):
+                # Add preview tag because its multipartExr.
+                preview = True
+            else:
+                # Add Preview tag if the AOV matches the filter.
+                preview = match_aov_pattern(
+                    "houdini", self.aov_filter, aov_filenames[0]
+                )
+
+            preview = preview and instance.data.get("review", False)
+
+            # Support Single frame.
+            # The integrator wants single files to be a single
+            # filename instead of a list.
+            # More info: https://github.com/ynput/ayon-core/issues/238
+            if len(aov_filenames) == 1:
+                aov_filenames = aov_filenames[0]
+
+            aov_instance.data.update({
+                # 'label': label,
+                "task": instance.data["task"],
+                "folderPath": instance.data["folderPath"],
+                "frameStart": instance.data["frameStartHandle"],
+                "frameEnd": instance.data["frameEndHandle"],
+                "productType": product_type,
+                "family": product_type,
+                "productName": product_name,
+                "productGroup": product_group,
+                "families": ["render.local.hou", "review"],
+                "instance_node": instance.data["instance_node"],
+                "representations": [
+                    {
+                        "stagingDir": staging_dir,
+                        "ext": ext,
+                        "name": ext,
+                        "tags": ["review"] if preview else [],
+                        "files": aov_filenames,
+                        "frameStart": instance.data["frameStartHandle"],
+                        "frameEnd": instance.data["frameEndHandle"]
+                    }
+                ]
+            })
+
+        # Skip integrating original render instance.
+        # We are not removing it because it's used to trigger the render.
+        instance.data["integrate"] = False
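`match_aov_pattern` is driven by the restructured `aov_filter`, a mapping of host name to a list of regex patterns. A behavior sketch under the assumption that the helper simply regex-matches the filename (the real implementation in ayon_core.pipeline.farm.patterning may differ in details):

import re

def match_aov_pattern_sketch(host_name, aov_filter, filename):
    return any(
        re.match(pattern, filename)
        for pattern in aov_filter.get(host_name, [])
    )

aov_filter = {"houdini": [".*([Bb]eauty).*"]}
print(match_aov_pattern_sketch(
    "houdini", aov_filter, "shot_beauty.0001.exr"))  # True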
@@ -44,12 +44,9 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin):
         default_prefix = evalParmNoFrame(rop, "vm_picture")
         render_products = []

-        # Store whether we are splitting the render job (export + render)
-        split_render = bool(rop.parm("soho_outputmode").eval())
-        instance.data["splitRender"] = split_render
         export_prefix = None
         export_products = []
-        if split_render:
+        if instance.data["splitRender"]:
             export_prefix = evalParmNoFrame(
                 rop, "soho_diskfile", pad_character="0"
             )

@@ -74,6 +71,11 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin):
                                                      beauty_product)
         }

+        # Assume it's a multipartExr Render.
+        multipartExr = True
+
+        # TODO: This logic doesn't take into considerations
+        # cryptomatte defined in 'Images > Cryptomatte'
         aov_numbers = rop.evalParm("vm_numaux")
         if aov_numbers > 0:
             # get the filenames of the AOVs

@@ -93,6 +95,14 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin):

             files_by_aov[var] = self.generate_expected_files(instance, aov_product)  # noqa

+            # Set to False as soon as we have a separated aov.
+            multipartExr = False
+
+        # Review Logic expects this key to exist and be True
+        # if render is a multipart Exr.
+        # As long as we have one AOV then multipartExr should be True.
+        instance.data["multipartExr"] = multipartExr
+
         for product in render_products:
             self.log.debug("Found render product: %s" % product)
@@ -15,7 +15,8 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin):
         "usd",
         "usdrender",
         "redshiftproxy",
-        "staticMesh"
+        "staticMesh",
+        "model"
     ]

     hosts = ["houdini"]
@@ -42,11 +42,9 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):

         default_prefix = evalParmNoFrame(rop, "RS_outputFileNamePrefix")
         beauty_suffix = rop.evalParm("RS_outputBeautyAOVSuffix")
-        # Store whether we are splitting the render job (export + render)
-        split_render = bool(rop.parm("RS_archive_enable").eval())
-        instance.data["splitRender"] = split_render

         export_products = []
-        if split_render:
+        if instance.data["splitRender"]:
             export_prefix = evalParmNoFrame(
                 rop, "RS_archive_file", pad_character="0"
             )

@@ -63,9 +61,12 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
         full_exr_mode = (rop.evalParm("RS_outputMultilayerMode") == "2")
         if full_exr_mode:
             # Ignore beauty suffix if full mode is enabled
-            # As this is what the rop does.
+            # As this is what the rop does.
             beauty_suffix = ""

+        # Assume it's a multipartExr Render.
+        multipartExr = True
+
         # Default beauty/main layer AOV
         beauty_product = self.get_render_product_name(
             prefix=default_prefix, suffix=beauty_suffix

@@ -75,7 +76,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
             beauty_suffix: self.generate_expected_files(instance,
                                                         beauty_product)
         }

         aovs_rop = rop.parm("RS_aovGetFromNode").evalAsNode()
         if aovs_rop:
             rop = aovs_rop

@@ -98,13 +99,21 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):

             if rop.parm(f"RS_aovID_{i}").evalAsString() == "CRYPTOMATTE" or \
                     not full_exr_mode:

                 aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
                 render_products.append(aov_product)

                 files_by_aov[aov_suffix] = self.generate_expected_files(instance,
                                                                         aov_product)  # noqa

+                # Set to False as soon as we have a separated aov.
+                multipartExr = False
+
+        # Review Logic expects this key to exist and be True
+        # if render is a multipart Exr.
+        # As long as we have one AOV then multipartExr should be True.
+        instance.data["multipartExr"] = multipartExr
+
         for product in render_products:
             self.log.debug("Found render product: %s" % product)
@@ -8,7 +8,8 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin):
     label = "Collect Review Data"
     # This specific order value is used so that
     # this plugin runs after CollectRopFrameRange
-    order = pyblish.api.CollectorOrder + 0.1
+    # Also after CollectLocalRenderInstances
+    order = pyblish.api.CollectorOrder + 0.13
     hosts = ["houdini"]
     families = ["review"]

@@ -28,7 +29,8 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin):
         ropnode_path = instance.data["instance_node"]
         ropnode = hou.node(ropnode_path)

-        camera_path = ropnode.parm("camera").eval()
+        # Get camera based on the instance_node type.
+        camera_path = self._get_camera_path(ropnode)
         camera_node = hou.node(camera_path)
         if not camera_node:
             self.log.warning("No valid camera node found on review node: "

@@ -55,3 +57,29 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin):
         # Store focal length in `burninDataMembers`
         burnin_members = instance.data.setdefault("burninDataMembers", {})
         burnin_members["focalLength"] = focal_length
+
+    def _get_camera_path(self, ropnode):
+        """Get the camera path associated with the given rop node.
+
+        This function evaluates the camera parameter according to the
+        type of the given rop node.
+
+        Returns:
+            Union[str, None]: Camera path or None.
+
+            This function can return empty string if the camera
+            path is empty i.e. no camera path.
+        """
+
+        if ropnode.type().name() in {
+            "opengl", "karma", "ifd", "arnold"
+        }:
+            return ropnode.parm("camera").eval()
+
+        elif ropnode.type().name() == "Redshift_ROP":
+            return ropnode.parm("RS_renderCamera").eval()
+
+        elif ropnode.type().name() == "vray_renderer":
+            return ropnode.parm("render_camera").eval()
+
+        return None
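The type checks in `_get_camera_path` map each ROP node type to the parameter holding its camera; a hypothetical table-driven equivalent (not in the commit):

CAMERA_PARM_BY_ROP_TYPE = {
    "opengl": "camera",
    "karma": "camera",
    "ifd": "camera",
    "arnold": "camera",
    "Redshift_ROP": "RS_renderCamera",
    "vray_renderer": "render_camera",
}

def get_camera_path(ropnode):
    parm_name = CAMERA_PARM_BY_ROP_TYPE.get(ropnode.type().name())
    if parm_name is None:
        return None
    return ropnode.parm(parm_name).eval()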
@@ -0,0 +1,22 @@
+import pyblish.api
+
+
+class CollectReviewableInstances(pyblish.api.InstancePlugin):
+    """Collect Reviewable Instances.
+
+    Basically, all instances of the specified families
+    with creator_attribute["review"]
+    """
+
+    order = pyblish.api.CollectorOrder
+    label = "Collect Reviewable Instances"
+    families = ["mantra_rop",
+                "karma_rop",
+                "redshift_rop",
+                "arnold_rop",
+                "vray_rop"]
+
+    def process(self, instance):
+        creator_attribute = instance.data["creator_attributes"]
+
+        instance.data["review"] = creator_attribute.get("review", False)
@ -45,12 +45,9 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin):

        render_products = []
        # TODO: add render elements if render element

        # Store whether we are splitting the render job into separate
        # export and render jobs.
        split_render = rop.parm("render_export_mode").eval() == "2"
        instance.data["splitRender"] = split_render
        export_prefix = None
        export_products = []
        if split_render:
        if instance.data["splitRender"]:
            export_prefix = evalParmNoFrame(
                rop, "render_export_filepath", pad_character="0"
            )

@ -70,6 +67,9 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin):

            "": self.generate_expected_files(instance,
                                             beauty_product)}

        # Assume it's a multipartExr render.
        multipartExr = True

        if instance.data.get("RenderElement", True):
            render_element = self.get_render_element_name(rop, default_prefix)
            if render_element:

@ -77,7 +77,13 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin):

                render_products.append(renderpass)
                files_by_aov[aov] = self.generate_expected_files(
                    instance, renderpass)
                # Set to False as soon as we have a separated AOV.
                multipartExr = False

        # Review logic expects this key to exist and to be True if the
        # render is a multipart EXR: it stays True only as long as no
        # separated AOV has been collected.
        instance.data["multipartExr"] = multipartExr

        for product in render_products:
            self.log.debug("Found render product: %s" % product)
@ -19,6 +19,16 @@ class ExtractOpenGL(publish.Extractor,

    def process(self, instance):
        ropnode = hou.node(instance.data.get("instance_node"))

        # This plugin is triggered when marking the render as reviewable.
        # Therefore, it may run over the wrong instances.
        # TODO: Don't run this plugin on wrong instances.
        # This plugin should run only on the review product type
        # with an instance node of opengl type.
        if ropnode.type().name() != "opengl":
            self.log.debug("Skipping OpenGl extraction. Rop node {} "
                           "is not an OpenGl node.".format(ropnode.path()))
            return

        output = ropnode.evalParm("picture")
        staging_dir = os.path.normpath(os.path.dirname(output))
        instance.data["stagingDir"] = staging_dir
@ -0,0 +1,74 @@

import os

import pyblish.api
import hou

from ayon_core.pipeline import publish
from ayon_core.hosts.houdini.api.lib import render_rop


class ExtractRender(publish.Extractor):

    order = pyblish.api.ExtractorOrder
    label = "Extract Render"
    hosts = ["houdini"]
    families = ["mantra_rop",
                "karma_rop",
                "redshift_rop",
                "arnold_rop",
                "vray_rop"]

    def process(self, instance):
        creator_attribute = instance.data["creator_attributes"]
        product_type = instance.data["productType"]
        rop_node = hou.node(instance.data.get("instance_node"))

        # Align the split parameter value on the rop node
        # to the render target.
        if instance.data["splitRender"]:
            if product_type == "arnold_rop":
                rop_node.setParms({"ar_ass_export_enable": 1})
            elif product_type == "mantra_rop":
                rop_node.setParms({"soho_outputmode": 1})
            elif product_type == "redshift_rop":
                rop_node.setParms({"RS_archive_enable": 1})
            elif product_type == "vray_rop":
                rop_node.setParms({"render_export_mode": "2"})
        else:
            if product_type == "arnold_rop":
                rop_node.setParms({"ar_ass_export_enable": 0})
            elif product_type == "mantra_rop":
                rop_node.setParms({"soho_outputmode": 0})
            elif product_type == "redshift_rop":
                rop_node.setParms({"RS_archive_enable": 0})
            elif product_type == "vray_rop":
                rop_node.setParms({"render_export_mode": "1"})

        if instance.data.get("farm"):
            self.log.debug("Render should be processed on farm, "
                           "skipping local render.")
            return

        if creator_attribute.get("render_target") == "local":
            ropnode = hou.node(instance.data.get("instance_node"))
            render_rop(ropnode)

        # `expectedFiles` is a list that includes one dict.
        expected_files = instance.data["expectedFiles"][0]
        # Each key in that dict maps to a list of files.
        # Combine the lists of files into one big list.
        all_frames = []
        for value in expected_files.values():
            if isinstance(value, str):
                all_frames.append(value)
            elif isinstance(value, list):
                all_frames.extend(value)
        # Check for missing frames.
        # Frames won't exist if the user cancels the render.
        missing_frames = [
            frame
            for frame in all_frames
            if not os.path.exists(frame)
        ]
        if missing_frames:
            # TODO: Use user friendly error reporting.
            raise RuntimeError("Failed to complete render extraction. "
                               "Missing output files: {}".format(
                                   missing_frames))
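For context, `instance.data["expectedFiles"]` is assumed here to be a single-item list holding a dict that maps each AOV name to one path or a list of paths, which is why the loop above branches on `str` versus `list`. An illustrative value (the paths are made up):

    expected_files = [
        {
            "beauty": [
                "/proj/renders/shot010.0001.exr",
                "/proj/renders/shot010.0002.exr",
            ],
            # A single-file export may be stored as a plain string.
            "ass": "/proj/export/shot010.ass",
        }
    ]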
@ -17,11 +17,13 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin):

    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["houdini"]
    families = ["workfile",
                "redshift_rop",
                "arnold_rop",
                "usdrender",
                "mantra_rop",
                "karma_rop",
                "usdrender",
                "redshift_rop",
                "arnold_rop",
                "vray_rop",
                "render.local.hou",
                "publish.hou"]
    optional = True
@ -0,0 +1,60 @@

# -*- coding: utf-8 -*-
"""Validator for checking that an export is a single frame."""
import hou
import pyblish.api

from ayon_core.pipeline import (
    PublishValidationError,
    OptionalPyblishPluginMixin
)
from ayon_core.pipeline.publish import ValidateContentsOrder
from ayon_core.hosts.houdini.api.action import SelectInvalidAction


class ValidateSingleFrame(pyblish.api.InstancePlugin,
                          OptionalPyblishPluginMixin):
    """Validate the export is a single frame.

    It checks if the rop node is exporting one frame.
    This is mainly for the model product type.
    """

    families = ["model"]
    hosts = ["houdini"]
    label = "Validate Single Frame"
    order = ValidateContentsOrder + 0.1
    actions = [SelectInvalidAction]

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            nodes = [n.path() for n in invalid]
            raise PublishValidationError(
                "See log for details. "
                "Invalid nodes: {0}".format(nodes)
            )

    @classmethod
    def get_invalid(cls, instance):

        invalid = []

        frame_start = instance.data.get("frameStartHandle")
        frame_end = instance.data.get("frameEndHandle")

        # This happens if the instance node has no 'trange' parameter.
        if frame_start is None or frame_end is None:
            cls.log.debug(
                "No frame data, skipping check.."
            )
            return

        if frame_start != frame_end:
            # Resolve the node object so callers can query its path.
            invalid.append(hou.node(instance.data["instance_node"]))
            cls.log.error(
                "Invalid frame range on '%s'. "
                "You should use the same frame number for 'f1' "
                "and 'f2' parameters.",
                instance.data["instance_node"]
            )

        return invalid
@ -16,9 +16,13 @@ class ValidateMeshIsStatic(pyblish.api.InstancePlugin,

    """Validate mesh is static.

    It checks if the output node is time dependent.
    This avoids getting different output from the ROP node when it is
    extracted from a different frame than the first frame.
    (Might be overly restrictive though.)
    """

    families = ["staticMesh"]
    families = ["staticMesh",
                "model"]
    hosts = ["houdini"]
    label = "Validate Mesh is Static"
    order = ValidateContentsOrder + 0.1
@ -7,7 +7,7 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):

    """Validate Create Intermediate Directories is enabled on ROP node."""

    order = pyblish.api.ValidatorOrder
    families = ["pointcache", "camera", "vdbcache"]
    families = ["pointcache", "camera", "vdbcache", "model"]
    hosts = ["houdini"]
    label = "Create Intermediate Directories Checked"
@ -56,6 +56,18 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin,

    def process(self, instance):

        rop_node = hou.node(instance.data["instance_node"])

        # This plugin is triggered when marking the render as reviewable.
        # Therefore, it may run over the wrong instances.
        # TODO: Don't run this plugin on wrong instances.
        # This plugin should run only on the review product type
        # with an instance node of opengl type.
        if rop_node.type().name() != "opengl":
            self.log.debug("Skipping Validation. Rop node {} "
                           "is not an OpenGl node.".format(rop_node.path()))
            return

        if not self.is_active(instance.data):
            return

@ -66,7 +78,6 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin,

            )
            return

        rop_node = hou.node(instance.data["instance_node"])
        if rop_node.evalParm("colorcorrect") != 2:
            # Any colorspace setting other than the default requires the
            # 'Color Correct' parm to be set to 'OpenColorIO'.
@ -20,6 +20,16 @@ class ValidateSceneReview(pyblish.api.InstancePlugin):

        report = []
        instance_node = hou.node(instance.data.get("instance_node"))

        # This plugin is triggered when marking the render as reviewable.
        # Therefore, it may run over the wrong instances.
        # TODO: Don't run this plugin on wrong instances.
        # This plugin should run only on the review product type
        # with an instance node of opengl type.
        if instance_node.type().name() != "opengl":
            self.log.debug("Skipping Validation. Rop node {} "
                           "is not an OpenGl node.".format(instance_node.path()))
            return

        invalid = self.get_invalid_scene_path(instance_node)
        if invalid:
            report.append(invalid)
@ -22,7 +22,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):

    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache", "vdbcache"]
    families = ["pointcache", "vdbcache", "model"]
    hosts = ["houdini"]
    label = "Validate Output Node (SOP)"
    actions = [SelectROPAction, SelectInvalidAction]
client/ayon_core/hosts/houdini/startup/OPmenu.xml (new file, 29 lines)
@ -0,0 +1,29 @@

<?xml version="1.0" encoding="UTF-8"?>
<!-- OPMenu stencil.
  It's used to extend the OPMenu.
-->

<menuDocument>
  <menu>
    <!-- Operator type and asset options. -->
    <subMenu id="opmenu.vhda_options_create">
      <insertBefore>opmenu.unsynchronize</insertBefore>
      <scriptItem id="opmenu.vhda_create_ayon">
        <insertAfter>opmenu.vhda_create</insertAfter>
        <label>Create New (AYON)...</label>
        <context>
        </context>
        <scriptCode>
          <![CDATA[
from ayon_core.hosts.houdini.api.creator_node_shelves import create_interactive

node = kwargs["node"]
if node not in hou.selectedNodes():
    node.setSelected(True)
create_interactive("io.openpype.creators.houdini.hda", **kwargs)
          ]]>
        </scriptCode>
      </scriptItem>
    </subMenu>
  </menu>
</menuDocument>
@ -6,12 +6,9 @@ import json

from typing import Any, Dict, Union

import six
import ayon_api

from ayon_core.pipeline import (
    get_current_project_name,
    get_current_folder_path,
    get_current_task_name,
    colorspace
)
from ayon_core.settings import get_project_settings

@ -372,12 +369,8 @@ def reset_colorspace():

    """
    if int(get_max_version()) < 2024:
        return
    project_name = get_current_project_name()
    colorspace_mgr = rt.ColorPipelineMgr
    project_settings = get_project_settings(project_name)

    max_config_data = colorspace.get_imageio_config(
        project_name, "max", project_settings)
    max_config_data = colorspace.get_current_context_imageio_config_preset()
    if max_config_data:
        ocio_config_path = max_config_data["path"]
        colorspace_mgr = rt.ColorPipelineMgr

@ -392,10 +385,7 @@ def check_colorspace():

                      "because Max main window can't be found.")
    if int(get_max_version()) >= 2024:
        color_mgr = rt.ColorPipelineMgr
        project_name = get_current_project_name()
        project_settings = get_project_settings(project_name)
        max_config_data = colorspace.get_imageio_config(
            project_name, "max", project_settings)
        max_config_data = colorspace.get_current_context_imageio_config_preset()
        if max_config_data and color_mgr.Mode != rt.Name("OCIO_Custom"):
            if not is_headless():
                from ayon_core.tools.utils import SimplePopup
@ -52,11 +52,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

        self._has_been_setup = True

        def context_setting():
            return lib.set_context_setting()

        rt.callbacks.addScript(rt.Name('systemPostNew'),
                               context_setting)
        rt.callbacks.addScript(rt.Name('systemPostNew'), on_new)

        rt.callbacks.addScript(rt.Name('filePostOpen'),
                               lib.check_colorspace)

@ -163,6 +159,14 @@ def ls() -> list:

        yield lib.read(container)


def on_new():
    lib.set_context_setting()
    if rt.checkForSave():
        rt.resetMaxFile(rt.Name("noPrompt"))
        rt.clearUndoBuffer()
        rt.redrawViews()


def containerise(name: str, nodes: list, context,
                 namespace=None, loader=None, suffix="_CON"):
    data = {
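A minimal sketch of the callback wiring used above; it only runs inside 3ds Max where `pymxs` is available, and the callback id is a made-up example:

    from pymxs import runtime as rt

    def on_new():
        # React to a freshly created scene, mirroring 'systemPostNew' above.
        print("New scene created")

    # Register the callback under an id so it can be removed later.
    rt.callbacks.addScript(rt.Name("systemPostNew"), on_new,
                           id=rt.Name("exampleCallbacks"))
    rt.callbacks.removeScripts(id=rt.Name("exampleCallbacks"))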
@ -47,7 +47,7 @@ class FBXExtractor:

            "smoothMesh": bool,
            "instances": bool,
            # "referencedContainersContent": bool, # deprecated in Maya 2016+
            "bakeComplexAnimation": int,
            "bakeComplexAnimation": bool,
            "bakeComplexStart": int,
            "bakeComplexEnd": int,
            "bakeComplexStep": int,

@ -59,6 +59,7 @@ class FBXExtractor:

            "constraints": bool,
            "lights": bool,
            "embeddedTextures": bool,
            "includeChildren": bool,
            "inputConnections": bool,
            "upAxis": str,  # x, y or z,
            "triangulate": bool,

@ -102,6 +103,7 @@ class FBXExtractor:

            "constraints": False,
            "lights": True,
            "embeddedTextures": False,
            "includeChildren": True,
            "inputConnections": True,
            "upAxis": "y",
            "triangulate": False,
@ -6,7 +6,6 @@ from ayon_core.lib import (

    BoolDef,
    NumberDef,
)
from ayon_core.pipeline import CreatedInstance


def _get_animation_attr_defs(cls):
@ -8,7 +8,7 @@ from ayon_core.pipeline import (

from ayon_core.pipeline.load.utils import get_representation_path_from_context
from ayon_core.pipeline.colorspace import (
    get_imageio_file_rules_colorspace_from_filepath,
    get_imageio_config,
    get_current_context_imageio_config_preset,
    get_imageio_file_rules
)
from ayon_core.settings import get_project_settings

@ -270,8 +270,7 @@ class FileNodeLoader(load.LoaderPlugin):

        host_name = get_current_host_name()
        project_settings = get_project_settings(project_name)

        config_data = get_imageio_config(
            project_name, host_name,
        config_data = get_current_context_imageio_config_preset(
            project_settings=project_settings
        )
@ -35,7 +35,8 @@ class ExtractFBXAnimation(publish.Extractor):

        fbx_exporter = fbx.FBXExtractor(log=self.log)
        out_members = instance.data.get("animated_skeleton", [])
        # Export
        instance.data["constraints"] = True
        # TODO: expose these options so users can set the
        # export flags they actually intend to use.
        instance.data["skeletonDefinitions"] = True
        instance.data["referencedAssetsContent"] = True
        fbx_exporter.set_options_from_instance(instance)
@ -1,71 +0,0 @@

import pyblish.api
import ayon_core.hosts.maya.api.action
from ayon_core.pipeline.publish import (
    PublishValidationError,
    ValidateContentsOrder,
    OptionalPyblishPluginMixin
)
from maya import cmds


class ValidateAnimatedReferenceRig(pyblish.api.InstancePlugin,
                                   OptionalPyblishPluginMixin):
    """Validate all nodes in skeletonAnim_SET are referenced"""

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["animation.fbx"]
    label = "Animated Reference Rig"
    accepted_controllers = ["transform", "locator"]
    actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
    optional = False

    def process(self, instance):
        if not self.is_active(instance.data):
            return
        animated_sets = instance.data.get("animated_skeleton", [])
        if not animated_sets:
            self.log.debug(
                "No nodes found in skeletonAnim_SET. "
                "Skipping validation of animated reference rig..."
            )
            return

        for animated_reference in animated_sets:
            is_referenced = cmds.referenceQuery(
                animated_reference, isNodeReferenced=True)
            if not bool(is_referenced):
                raise PublishValidationError(
                    "All the content in skeletonAnim_SET"
                    " should be referenced nodes"
                )
        invalid_controls = self.validate_controls(animated_sets)
        if invalid_controls:
            raise PublishValidationError(
                "All the content in skeletonAnim_SET"
                " should be transforms"
            )

    @classmethod
    def validate_controls(self, set_members):
        """Check if the controller set contains only accepted node types.

        Checks if all its set members are within the hierarchy of the root
        Checks if the node types of the set members valid

        Args:
            set_members: list of nodes of the skeleton_anim_set
            hierarchy: list of nodes which reside under the root node

        Returns:
            errors (list)
        """

        # Validate control types
        invalid = []
        set_members = cmds.ls(set_members, long=True)
        for node in set_members:
            if cmds.nodeType(node) not in self.accepted_controllers:
                invalid.append(node)

        return invalid
@ -80,17 +80,21 @@ def get_engine_versions(env=None):

def get_editor_exe_path(engine_path: Path, engine_version: str) -> Path:
    """Get UE Editor executable path."""
    ue_path = engine_path / "Engine/Binaries"

    ue_name = "UnrealEditor"

    # Handle older versions of Unreal Engine.
    if engine_version.split(".")[0] == "4":
        ue_name = "UE4Editor"

    if platform.system().lower() == "windows":
        if engine_version.split(".")[0] == "4":
            ue_path /= "Win64/UE4Editor.exe"
        elif engine_version.split(".")[0] == "5":
            ue_path /= "Win64/UnrealEditor.exe"
        ue_path /= f"Win64/{ue_name}.exe"

    elif platform.system().lower() == "linux":
        ue_path /= "Linux/UE4Editor"
        ue_path /= f"Linux/{ue_name}"

    elif platform.system().lower() == "darwin":
        ue_path /= "Mac/UE4Editor"
        ue_path /= f"Mac/{ue_name}"

    return ue_path
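A usage sketch of the refactored helper; the engine install path below is a placeholder:

    from pathlib import Path

    # With UE 5.x on Windows this resolves to
    # <engine>/Engine/Binaries/Win64/UnrealEditor.exe,
    # while a "4.27" version resolves to .../Win64/UE4Editor.exe.
    editor = get_editor_exe_path(Path("C:/Epic/UE_5.3"), "5.3")
    print(editor)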
@ -139,6 +139,7 @@ from .path_tools import (

)

from .ayon_info import (
    is_in_ayon_launcher_process,
    is_running_from_build,
    is_using_ayon_console,
    is_staging_enabled,

@ -248,6 +249,7 @@ __all__ = [

    "Logger",

    "is_in_ayon_launcher_process",
    "is_running_from_build",
    "is_using_ayon_console",
    "is_staging_enabled",
@ -1,4 +1,5 @@

import os
import sys
import json
import datetime
import platform

@ -25,6 +26,18 @@ def get_ayon_launcher_version():

    return content["__version__"]


def is_in_ayon_launcher_process():
    """Determine if the current process is running from the AYON launcher.

    Returns:
        bool: True if running from the AYON launcher.

    """
    ayon_executable_path = os.path.normpath(os.environ["AYON_EXECUTABLE"])
    executable_path = os.path.normpath(sys.executable)
    return ayon_executable_path == executable_path


def is_running_from_build():
    """Determine if the current process is running from build or code.
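A small usage sketch of the new helper; it assumes the `AYON_EXECUTABLE` environment variable is set, which is the case when the process was started through AYON:

    from ayon_core.lib import is_in_ayon_launcher_process

    if is_in_ayon_launcher_process():
        print("Running inside the AYON launcher executable.")
    else:
        print("Running from a different Python interpreter.")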
@ -1,5 +0,0 @@

from .clockify_module import ClockifyModule

__all__ = (
    "ClockifyModule",
)
@ -29,15 +29,11 @@ from ayon_core.pipeline.publish.lib import (

JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)


# TODO both 'requests_post' and 'requests_get' should not set 'verify' based
# on environment variable. This should be done in a more controlled way,
# e.g. each deadline url could have a checkbox to enable/disable
# ssl verification.
def requests_post(*args, **kwargs):
    """Wrap request post method.

    Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
    variable is found. This is useful when Deadline server is
    Disabling SSL certificate validation if ``verify`` kwarg is set to False.
    This is useful when the Deadline server is
    running with self-signed certificates and its certificate is not
    added to trusted certificates on client machines.

@ -46,10 +42,6 @@ def requests_post(*args, **kwargs):

    of defense SSL is providing, and it is not recommended.

    """
    if 'verify' not in kwargs:
        kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
                                              True) else True  # noqa

    auth = kwargs.get("auth")
    if auth:
        kwargs["auth"] = tuple(auth)  # explicit cast to tuple

@ -61,8 +53,8 @@ def requests_get(*args, **kwargs):

def requests_get(*args, **kwargs):
    """Wrap request get method.

    Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
    variable is found. This is useful when Deadline server is
    Disabling SSL certificate validation if ``verify`` kwarg is set to False.
    This is useful when the Deadline server is
    running with self-signed certificates and its certificate is not
    added to trusted certificates on client machines.

@ -71,9 +63,6 @@ def requests_get(*args, **kwargs):

    of defense SSL is providing, and it is not recommended.

    """
    if 'verify' not in kwargs:
        kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
                                              True) else True  # noqa
    auth = kwargs.get("auth")
    if auth:
        kwargs["auth"] = tuple(auth)

@ -466,7 +455,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,

        self.aux_files = self.get_aux_files()

        auth = instance.data["deadline"]["auth"]
        job_id = self.process_submission(auth)
        verify = instance.data["deadline"]["verify"]
        job_id = self.process_submission(auth, verify)
        self.log.info("Submitted job to Deadline: {}.".format(job_id))

        # TODO: Find a way that's more generic and not render type specific

@ -479,10 +469,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,

                job_info=render_job_info,
                plugin_info=render_plugin_info
            )
            render_job_id = self.submit(payload, auth)
            render_job_id = self.submit(payload, auth, verify)
            self.log.info("Render job id: %s", render_job_id)

    def process_submission(self, auth=None):
    def process_submission(self, auth=None, verify=True):
        """Process data for submission.

        This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload

@ -493,7 +483,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,

        """
        payload = self.assemble_payload()
        return self.submit(payload, auth)
        return self.submit(payload, auth, verify)

    @abstractmethod
    def get_job_info(self):

@ -583,7 +573,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,

            "AuxFiles": aux_files or self.aux_files
        }

    def submit(self, payload, auth):
    def submit(self, payload, auth, verify):
        """Submit payload to Deadline API end-point.

        This takes the payload in the form of a JSON file and POSTs it to

@ -592,6 +582,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,

        Args:
            payload (dict): dict to become json in deadline submission.
            auth (tuple): (username, password)
            verify (bool): verify SSL certificate if present

        Returns:
            str: resulting Deadline job id.

@ -601,8 +592,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,

        """
        url = "{}/api/jobs".format(self._deadline_url)
        response = requests_post(url, json=payload,
                                 auth=auth)
        response = requests_post(
            url, json=payload, auth=auth, verify=verify)
        if not response.ok:
            self.log.error("Submission failed!")
            self.log.error(response.status_code)
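After this change the caller owns the SSL decision instead of an environment variable; a sketch of a call site (URL and credentials are placeholders):

    # verify=False disables SSL certificate validation, e.g. for a
    # Deadline server running with a self-signed certificate.
    response = requests_post(
        "https://deadline.example.com:4433/api/jobs",
        json={"JobInfo": {}, "PluginInfo": {}, "AuxFiles": []},
        auth=("username", "password"),
        verify=False,
    )
    if not response.ok:
        raise Exception(response.text)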
@ -26,27 +26,32 @@ class CollectDeadlinePools(pyblish.api.InstancePlugin,

    order = pyblish.api.CollectorOrder + 0.420
    label = "Collect Deadline Pools"
    hosts = ["aftereffects",
             "fusion",
             "harmony"
             "nuke",
             "maya",
             "max",
             "houdini"]
    hosts = [
        "aftereffects",
        "fusion",
        "harmony",
        "maya",
        "max",
        "houdini",
        "nuke",
    ]

    families = ["render",
                "rendering",
                "render.farm",
                "renderFarm",
                "renderlayer",
                "maxrender",
                "usdrender",
                "redshift_rop",
                "arnold_rop",
                "mantra_rop",
                "karma_rop",
                "vray_rop",
                "publish.hou"]
    families = [
        "render",
        "prerender",
        "rendering",
        "render.farm",
        "renderFarm",
        "renderlayer",
        "maxrender",
        "usdrender",
        "redshift_rop",
        "arnold_rop",
        "mantra_rop",
        "karma_rop",
        "vray_rop",
        "publish.hou",
    ]

    primary_pool = None
    secondary_pool = None
@ -82,7 +82,10 @@ class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):

        )
        instance.data["deadline"]["auth"] = None

        if not require_authentication:
        instance.data["deadline"]["verify"] = (
            not deadline_info["not_verify_ssl"])

        if not deadline_info["require_authentication"]:
            return
        # TODO import 'get_addon_site_settings' when available
        # in public 'ayon_api'
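The submission plugins below all read their connection options from one per-instance dict; the shape assumed throughout this changeset (values are illustrative, further keys may exist):

    instance.data["deadline"] = {
        "auth": ("username", "password"),  # or None when not required
        # Derived above as the inverse of the 'not_verify_ssl' setting.
        "verify": True,
    }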
@ -174,8 +174,9 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

        instance.data["toBeRenderedOn"] = "deadline"

        payload = self.assemble_payload()
        return self.submit(payload,
                           auth=instance.data["deadline"]["auth"])
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        return self.submit(payload, auth=auth, verify=verify)

    def from_published_scene(self):
        """

@ -193,9 +193,11 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):

        self.expected_files(instance, render_path)
        self.log.debug("__ expectedFiles: `{}`".format(
            instance.data["expectedFiles"]))

        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(self.deadline_url, json=payload,
                                 auth=instance.data["deadline"]["require_authentication"])
                                 auth=auth,
                                 verify=verify)

        if not response.ok:
            self.log.error(

@ -242,7 +242,8 @@ class FusionSubmitDeadline(

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(deadline_url)
        auth = instance.data["deadline"]["auth"]
        response = requests_post(url, json=payload, auth=auth)
        verify = instance.data["deadline"]["verify"]
        response = requests_post(url, json=payload, auth=auth, verify=verify)
        if not response.ok:
            raise Exception(response.text)
@ -85,7 +85,7 @@ class HoudiniSubmitDeadline(

    priority = 50
    chunk_size = 1
    group = ""

    @classmethod
    def get_attribute_defs(cls):
        return [

@ -188,7 +188,7 @@ class HoudiniSubmitDeadline(

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")

        if split_render_job and is_export_job:
            job_info.Priority = attribute_values.get(
                "export_priority", self.export_priority
@ -309,6 +309,11 @@ class HoudiniSubmitDeadline(
        return attr.asdict(plugin_info)

    def process(self, instance):
        if not instance.data["farm"]:
            self.log.debug("Render on farm is disabled. "
                           "Skipping deadline submission.")
            return

        super(HoudiniSubmitDeadline, self).process(instance)

        # TODO: Avoid the need for this logic here, needed for submit publish
@ -181,19 +181,27 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

        self.log.debug("Submitting 3dsMax render..")
        project_settings = instance.context.data["project_settings"]
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        if instance.data.get("multiCamera"):
            self.log.debug("Submitting jobs for multiple cameras..")
            payload = self._use_published_name_for_multiples(
                payload_data, project_settings)
            job_infos, plugin_infos = payload
            for job_info, plugin_info in zip(job_infos, plugin_infos):
                self.submit(self.assemble_payload(job_info, plugin_info),
                            instance.data["deadline"]["auth"])
                self.submit(
                    self.assemble_payload(job_info, plugin_info),
                    auth=auth,
                    verify=verify
                )
        else:
            payload = self._use_published_name(payload_data, project_settings)
            job_info, plugin_info = payload
            self.submit(self.assemble_payload(job_info, plugin_info),
                        instance.data["deadline"]["auth"])
            self.submit(
                self.assemble_payload(job_info, plugin_info),
                auth=auth,
                verify=verify
            )

    def _use_published_name(self, data, project_settings):
        # Not all hosts can import these modules.
@ -292,7 +292,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

        return plugin_payload

    def process_submission(self, auth=None):
    def process_submission(self, auth=None, verify=True):
        from maya import cmds
        instance = self._instance

@ -332,8 +332,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

        if "vrayscene" in instance.data["families"]:
            self.log.debug("Submitting V-Ray scene render..")
            vray_export_payload = self._get_vray_export_payload(payload_data)

            export_job = self.submit(vray_export_payload,
                                     instance.data["deadline"]["auth"])
                                     auth=auth,
                                     verify=verify)

            payload = self._get_vray_render_payload(payload_data)

@ -353,7 +355,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

        # Submit main render job
        job_info, plugin_info = payload
        self.submit(self.assemble_payload(job_info, plugin_info),
                    instance.data["deadline"]["auth"])
                    auth=auth,
                    verify=verify)

    def _tile_render(self, payload):
        """Submit as tile render per frame with dependent assembly jobs."""

@ -557,13 +560,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

        # Submit assembly jobs
        assembly_job_ids = []
        num_assemblies = len(assembly_payloads)
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        for i, payload in enumerate(assembly_payloads):
            self.log.debug(
                "submitting assembly job {} of {}".format(i + 1,
                                                          num_assemblies)
            )
            assembly_job_id = self.submit(payload,
                                          instance.data["deadline"]["auth"])
            assembly_job_id = self.submit(
                payload,
                auth=auth,
                verify=verify
            )
            assembly_job_ids.append(assembly_job_id)

        instance.data["assemblySubmissionJobs"] = assembly_job_ids
@ -424,8 +424,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,

        self.log.debug("__ expectedFiles: `{}`".format(
            instance.data["expectedFiles"]))
        auth = instance.data["deadline"]["auth"]
        response = requests_post(self.deadline_url, json=payload, timeout=10,
                                 auth=auth)
        verify = instance.data["deadline"]["verify"]
        response = requests_post(self.deadline_url,
                                 json=payload,
                                 timeout=10,
                                 auth=auth,
                                 verify=verify)

        if not response.ok:
            raise Exception(response.text)

@ -210,8 +210,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,

        url = "{}/api/jobs".format(self.deadline_url)
        auth = instance.data["deadline"]["auth"]
        response = requests_post(url, json=payload, timeout=10,
                                 auth=auth)
        verify = instance.data["deadline"]["verify"]
        response = requests_post(
            url, json=payload, timeout=10, auth=auth, verify=verify)
        if not response.ok:
            raise Exception(response.text)

@ -304,8 +304,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,

        url = "{}/api/jobs".format(self.deadline_url)
        auth = instance.data["deadline"]["auth"]
        response = requests_post(url, json=payload, timeout=10,
                                 auth=auth)
        verify = instance.data["deadline"]["verify"]
        response = requests_post(
            url, json=payload, timeout=10, auth=auth, verify=verify)
        if not response.ok:
            raise Exception(response.text)

@ -467,8 +468,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,

        # Inject deadline url to instances to query DL for job id for overrides
        for inst in instances:
            if not "deadline" in inst:
                inst["deadline"] = {}
            inst["deadline"] = instance.data["deadline"]

        # publish job file

@ -72,7 +72,7 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,

                auth=auth,
                log=self.log)
            # some DL return "none" as a pool name
            if not "none" in pools:
            if "none" not in pools:
                pools.append("none")
            self.log.info("Available pools: {}".format(pools))
            self.pools_per_url[deadline_url] = pools

@ -7,7 +7,7 @@ from ayon_core.lib import Logger, run_subprocess, AYONSettingsRegistry

from ayon_core.lib.vendor_bin_utils import find_tool_in_custom_paths

from .rr_job import SubmitFile
from .rr_job import RRjob, SubmitterParameter  # noqa F401
from .rr_job import RRJob, SubmitterParameter  # noqa F401


class Api:

(File diff suppressed because it is too large.)
@ -459,36 +459,6 @@ def is_representation_from_latest(representation):

    )


def get_template_data_from_session(session=None, settings=None):
    """Template data for template fill from session keys.

    Args:
        session (Union[Dict[str, str], None]): The Session to use. If not
            provided use the currently active global Session.
        settings (Optional[Dict[str, Any]]): Prepared studio or project
            settings.

    Returns:
        Dict[str, Any]: All available data from session.
    """

    if session is not None:
        project_name = session["AYON_PROJECT_NAME"]
        folder_path = session["AYON_FOLDER_PATH"]
        task_name = session["AYON_TASK_NAME"]
        host_name = session["AYON_HOST_NAME"]
    else:
        context = get_current_context()
        project_name = context["project_name"]
        folder_path = context["folder_path"]
        task_name = context["task_name"]
        host_name = get_current_host_name()

    return get_template_data_with_names(
        project_name, folder_path, task_name, host_name, settings
    )


def get_current_context_template_data(settings=None):
    """Prepare template data for current context.
@ -73,8 +73,8 @@ def get_folder_template_data(folder_entity, project_name):

        - 'parent' - direct parent name; the project name is used when the
            folder sits directly under the project

    Required document fields:
        Folder: 'path' -> Plan to require: 'folderType'
    Required entity fields:
        Folder: 'path', 'folderType'

    Args:
        folder_entity (Dict[str, Any]): Folder entity.

@ -101,6 +101,8 @@ def get_folder_template_data(folder_entity, project_name):

    return {
        "folder": {
            "name": folder_name,
            "type": folder_entity["folderType"],
            "path": path,
        },
        "asset": folder_name,
        "hierarchy": hierarchy,
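With the two new keys, the folder part of the template data for a folder at `/assets/characters/hero` would look roughly like this (values are illustrative):

    {
        "folder": {
            "name": "hero",
            "type": "Asset",
            "path": "/assets/characters/hero",
        },
        "asset": "hero",
        "hierarchy": "assets/characters",
    }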
@ -33,6 +33,7 @@ import collections

import pyblish.api
import ayon_api

from ayon_core.pipeline.template_data import get_folder_template_data
from ayon_core.pipeline.version_start import get_versioning_start

@ -383,24 +384,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):

        # - 'folder', 'hierarchy', 'parent', 'folder'
        folder_entity = instance.data.get("folderEntity")
        if folder_entity:
            folder_name = folder_entity["name"]
            folder_path = folder_entity["path"]
            hierarchy_parts = folder_path.split("/")
            hierarchy_parts.pop(0)
            hierarchy_parts.pop(-1)
            parent_name = project_entity["name"]
            if hierarchy_parts:
                parent_name = hierarchy_parts[-1]

            hierarchy = "/".join(hierarchy_parts)
            anatomy_data.update({
                "asset": folder_name,
                "hierarchy": hierarchy,
                "parent": parent_name,
                "folder": {
                    "name": folder_name,
                },
            })
            folder_data = get_folder_template_data(
                folder_entity,
                project_entity["name"]
            )
            anatomy_data.update(folder_data)
            return

        if instance.data.get("newAssetPublishing"):

@ -418,6 +406,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):

            "parent": parent_name,
            "folder": {
                "name": folder_name,
                "path": instance.data["folderPath"],
                # TODO get folder type from hierarchy
                # Using 'Shot' is current default behavior of editorial
                # (or 'newAssetPublishing') publishing.
                "type": "Shot",
            },
        })
@ -108,69 +108,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

    label = "Integrate Asset"
    order = pyblish.api.IntegratorOrder
    families = ["workfile",
                "pointcache",
                "pointcloud",
                "proxyAbc",
                "camera",
                "animation",
                "model",
                "maxScene",
                "mayaAscii",
                "mayaScene",
                "setdress",
                "layout",
                "ass",
                "assProxy",
                "vdbcache",
                "scene",
                "vrayproxy",
                "vrayscene_layer",
                "render",
                "prerender",
                "imagesequence",
                "review",
                "rendersetup",
                "rig",
                "plate",
                "look",
                "ociolook",
                "audio",
                "yetiRig",
                "yeticache",
                "nukenodes",
                "gizmo",
                "source",
                "matchmove",
                "image",
                "assembly",
                "fbx",
                "gltf",
                "textures",
                "action",
                "harmony.template",
                "harmony.palette",
                "editorial",
                "background",
                "camerarig",
                "redshiftproxy",
                "effect",
                "xgen",
                "hda",
                "usd",
                "staticMesh",
                "skeletalMesh",
                "mvLook",
                "mvUsd",
                "mvUsdComposition",
                "mvUsdOverride",
                "online",
                "uasset",
                "blendScene",
                "yeticacheUE",
                "tycache",
                "csv_ingest_file",
                ]

    default_template_name = "publish"

@ -360,7 +297,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

        # Compute the resource file infos once (files belonging to the
        # version instance instead of an individual representation) so
        # we can re-use those file infos per representation
        # we can reuse those file infos per representation
        resource_file_infos = self.get_files_info(
            resource_destinations, anatomy
        )
@ -1,6 +1,11 @@

import pyblish.api

from ayon_core.lib import filter_profiles
from ayon_core.host import ILoadHost
from ayon_core.pipeline.load import any_outdated_containers
from ayon_core.pipeline import (
    get_current_host_name,
    registered_host,
    PublishXmlValidationError,
    OptionalPyblishPluginMixin
)

@ -18,17 +23,50 @@ class ShowInventory(pyblish.api.Action):

        host_tools.show_scene_inventory()


class ValidateContainers(OptionalPyblishPluginMixin,
                         pyblish.api.ContextPlugin):

class ValidateOutdatedContainers(
    OptionalPyblishPluginMixin,
    pyblish.api.ContextPlugin
):
    """Containers must be updated to the latest version on publish."""

    label = "Validate Outdated Containers"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya", "houdini", "nuke", "harmony", "photoshop", "aftereffects"]

    optional = True
    actions = [ShowInventory]

    @classmethod
    def apply_settings(cls, settings):
        # Disable plugin if host does not inherit from 'ILoadHost'
        # - not a host that can load containers
        host = registered_host()
        if not isinstance(host, ILoadHost):
            cls.enabled = False
            return

        # Disable if no profile is found for the current host
        profiles = (
            settings
            ["core"]
            ["publish"]
            ["ValidateOutdatedContainers"]
            ["plugin_state_profiles"]
        )
        profile = filter_profiles(
            profiles, {"host_names": get_current_host_name()}
        )
        if not profile:
            cls.enabled = False
            return

        # Apply settings from profile
        for attr_name in {
            "enabled",
            "optional",
            "active",
        }:
            setattr(cls, attr_name, profile[attr_name])

    def process(self, context):
        if not self.is_active(context.data):
            return
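The profile lookup in `apply_settings` relies on `filter_profiles`; a minimal sketch of the matching it performs (the profile data is illustrative):

    from ayon_core.lib import filter_profiles

    profiles = [
        {"host_names": ["maya", "houdini"], "enabled": True,
         "optional": True, "active": True},
    ]
    # Returns the first profile whose filters match, or None.
    profile = filter_profiles(profiles, {"host_names": "maya"})
    if profile:
        print(profile["enabled"], profile["optional"], profile["active"])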
@ -1,8 +1,10 @@

import pyblish.api
from ayon_core.pipeline.publish import PublishValidationError
from ayon_core.pipeline.publish import (
    PublishValidationError, OptionalPyblishPluginMixin
)


class ValidateVersion(pyblish.api.InstancePlugin):
class ValidateVersion(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
    """Validate instance version.

    AYON does not allow overwriting previously published versions.

@ -18,6 +20,9 @@ class ValidateVersion(pyblish.api.InstancePlugin):

    active = True

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        version = instance.data.get("version")
        latest_version = instance.data.get("latestVersion")
@ -1,28 +1,31 @@

"""OpenColorIO Wrapper.

Only to be interpreted by Python 3. It is run in subprocess in case
Python 2 hosts needs to use it. Or it is used as module for Python 3
processing.

Providing functionality:
- get_colorspace - console command - python 2
    - returning all available color spaces
      found in input config path.
- _get_colorspace_data - python 3 - module function
    - returning all available colorspaces
      found in input config path.
- get_views - console command - python 2
    - returning all available viewers
      found in input config path.
- _get_views_data - python 3 - module function
    - returning all available viewers
      found in input config path.
Receive OpenColorIO information and store it in JSON format for processes
that don't have access to OpenColorIO, or whose version of OpenColorIO is
not compatible.
"""

import click
import json
from pathlib import Path
import PyOpenColorIO as ocio

import click

from ayon_core.pipeline.colorspace import (
    has_compatible_ocio_package,
    get_display_view_colorspace_name,
    get_config_file_rules_colorspace_from_filepath,
    get_config_version_data,
    get_ocio_config_views,
    get_ocio_config_colorspaces,
)


def _save_output_to_json_file(output, output_path):
    json_path = Path(output_path)
    with open(json_path, "w") as stream:
        json.dump(output, stream)

    print(f"Data saved to '{json_path}'")


@click.group()
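With the commands flattened onto the main click group, an invocation would look like this (the paths are placeholders):

    > python ./ocio_wrapper.py get_ocio_config_colorspaces \
        --config_path <path/to/config.ocio> \
        --output_path <path/to/output.json>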
@ -30,404 +33,185 @@ def main():

    pass  # noqa: WPS100


@main.group()
def config():
    """Config related commands group

    Example of use:
    > pyton.exe ./ocio_wrapper.py config <command> *args
    """
    pass  # noqa: WPS100


@main.group()
def colorspace():
    """Colorspace related commands group

    Example of use:
    > pyton.exe ./ocio_wrapper.py config <command> *args
    """
    pass  # noqa: WPS100


@config.command(
    name="get_colorspace",
    help=(
        "return all colorspaces from config file "
        "--path input arg is required"
    )
)
@click.option("--in_path", required=True,
              help="path where to read ocio config file",
              type=click.Path(exists=True))
@click.option("--out_path", required=True,
              help="path where to write output json file",
              type=click.Path())
def get_colorspace(in_path, out_path):
@main.command(
    name="get_ocio_config_colorspaces",
    help="return all colorspaces from config file")
@click.option(
    "--config_path",
    required=True,
    help="OCIO config path to read ocio config file.",
    type=click.Path(exists=True))
@click.option(
    "--output_path",
    required=True,
    help="path where to write output json file",
    type=click.Path())
def _get_ocio_config_colorspaces(config_path, output_path):
    """Aggregate all colorspaces to file.

    Python 2 wrapped console command

    Args:
        in_path (str): config file path string
        out_path (str): temp json file path string
        config_path (str): config file path string
        output_path (str): temp json file path string

    Example of use:
    > pyton.exe ./ocio_wrapper.py config get_colorspace
        --in_path=<path> --out_path=<path>
        --config_path <path> --output_path <path>
    """
    json_path = Path(out_path)

    out_data = _get_colorspace_data(in_path)

    with open(json_path, "w") as f_:
        json.dump(out_data, f_)

    print(f"Colorspace data are saved to '{json_path}'")


def _get_colorspace_data(config_path):
    """Return all found colorspace data.

    Args:
        config_path (str): path string leading to config.ocio

    Raises:
        IOError: Input config does not exist.

    Returns:
        dict: aggregated available colorspaces
    """
    config_path = Path(config_path)

    if not config_path.is_file():
        raise IOError(
            f"Input path `{config_path}` should be `config.ocio` file")

    config = ocio.Config().CreateFromFile(str(config_path))

    colorspace_data = {
        "roles": {},
        "colorspaces": {
            color.getName(): {
                "family": color.getFamily(),
                "categories": list(color.getCategories()),
                "aliases": list(color.getAliases()),
                "equalitygroup": color.getEqualityGroup(),
            }
            for color in config.getColorSpaces()
        },
        "displays_views": {
            f"{view} ({display})": {
                "display": display,
                "view": view
            }
            for display in config.getDisplays()
            for view in config.getViews(display)
        },
        "looks": {}
    }

    # add looks
    looks = config.getLooks()
    if looks:
        colorspace_data["looks"] = {
            look.getName(): {"process_space": look.getProcessSpace()}
            for look in looks
        }

    # add roles
    roles = config.getRoles()
    if roles:
        colorspace_data["roles"] = {
            role: {"colorspace": colorspace}
            for (role, colorspace) in roles
        }

    return colorspace_data
@config.command(
    name="get_views",
    help=(
        "return all viewers from config file "
        "--path input arg is required"
    _save_output_to_json_file(
        get_ocio_config_colorspaces(config_path),
        output_path
    )
)
@click.option("--in_path", required=True,
              help="path where to read ocio config file",
              type=click.Path(exists=True))
@click.option("--out_path", required=True,
              help="path where to write output json file",
              type=click.Path())
def get_views(in_path, out_path):


@main.command(
    name="get_ocio_config_views",
    help="All viewers from config file")
@click.option(
    "--config_path",
    required=True,
    help="OCIO config path to read ocio config file.",
    type=click.Path(exists=True))
@click.option(
    "--output_path",
    required=True,
    help="path where to write output json file",
    type=click.Path())
def _get_ocio_config_views(config_path, output_path):
    """Aggregate all viewers to file.

    Python 2 wrapped console command

    Args:
        in_path (str): config file path string
        out_path (str): temp json file path string
        config_path (str): config file path string
        output_path (str): temp json file path string

    Example of use:
    > pyton.exe ./ocio_wrapper.py config get_views \
        --in_path=<path> --out_path=<path>
        --config_path <path> --output_path <path>
    """
    json_path = Path(out_path)

    out_data = _get_views_data(in_path)

    with open(json_path, "w") as f_:
        json.dump(out_data, f_)

    print(f"Viewer data are saved to '{json_path}'")


def _get_views_data(config_path):
    """Return all found viewer data.

    Args:
        config_path (str): path string leading to config.ocio

    Raises:
        IOError: Input config does not exist.

    Returns:
        dict: aggregated available viewers
    """
    config_path = Path(config_path)

    if not config_path.is_file():
        raise IOError("Input path should be `config.ocio` file")

    config = ocio.Config().CreateFromFile(str(config_path))

    data_ = {}
    for display in config.getDisplays():
        for view in config.getViews(display):
            colorspace = config.getDisplayViewColorSpaceName(display, view)
            # Special token. See https://opencolorio.readthedocs.io/en/latest/guides/authoring/authoring.html#shared-views  # noqa
            if colorspace == "<USE_DISPLAY_NAME>":
                colorspace = display

            data_[f"{display}/{view}"] = {
                "display": display,
                "view": view,
                "colorspace": colorspace
            }

    return data_
@config.command(
    name="get_version",
    help=(
        "return major and minor version from config file "
        "--config_path input arg is required"
        "--out_path input arg is required"
    _save_output_to_json_file(
        get_ocio_config_views(config_path),
        output_path
    )
)
@click.option("--config_path", required=True,
              help="path where to read ocio config file",
              type=click.Path(exists=True))
@click.option("--out_path", required=True,
              help="path where to write output json file",
              type=click.Path())
def get_version(config_path, out_path):
    """Get version of config.

    Python 2 wrapped console command


@main.command(
    name="get_config_version_data",
    help="Get major and minor version from config file")
@click.option(
    "--config_path",
    required=True,
    help="OCIO config path to read ocio config file.",
    type=click.Path(exists=True))
@click.option(
    "--output_path",
    required=True,
    help="path where to write output json file",
    type=click.Path())
def _get_config_version_data(config_path, output_path):
    """Get version of config.

    Args:
        config_path (str): ocio config file path string
        out_path (str): temp json file path string
        output_path (str): temp json file path string

    Example of use:
    > pyton.exe ./ocio_wrapper.py config get_version \
        --config_path=<path> --out_path=<path>
        --config_path <path> --output_path <path>
    """
    json_path = Path(out_path)

    out_data = _get_version_data(config_path)

    with open(json_path, "w") as f_:
        json.dump(out_data, f_)

    print(f"Config version data are saved to '{json_path}'")


def _get_version_data(config_path):
    """Return major and minor version info.

    Args:
        config_path (str): path string leading to config.ocio

    Raises:
        IOError: Input config does not exist.

    Returns:
        dict: minor and major keys with values
    """
    config_path = Path(config_path)

    if not config_path.is_file():
        raise IOError("Input path should be `config.ocio` file")

    config = ocio.Config().CreateFromFile(str(config_path))

    return {
        "major": config.getMajorVersion(),
        "minor": config.getMinorVersion()
    }
@colorspace.command(
    name="get_config_file_rules_colorspace_from_filepath",
    help=(
        "return colorspace from filepath "
        "--config_path - ocio config file path (input arg is required) "
        "--filepath - any file path (input arg is required) "
        "--out_path - temp json file path (input arg is required)"
    _save_output_to_json_file(
        get_config_version_data(config_path),
        output_path
    )
)
@click.option("--config_path", required=True,
              help="path where to read ocio config file",
              type=click.Path(exists=True))
@click.option("--filepath", required=True,
              help="path to file to get colorspace from",
              type=click.Path())
@click.option("--out_path", required=True,
              help="path where to write output json file",
              type=click.Path())
def get_config_file_rules_colorspace_from_filepath(
    config_path, filepath, out_path


@main.command(
    name="get_config_file_rules_colorspace_from_filepath",
    help="Colorspace file rules from filepath")
@click.option(
    "--config_path",
    required=True,
    help="OCIO config path to read ocio config file.",
    type=click.Path(exists=True))
@click.option(
    "--filepath",
    required=True,
    help="Path to file to get colorspace from.",
    type=click.Path())
@click.option(
    "--output_path",
    required=True,
    help="Path where to write output json file.",
    type=click.Path())
def _get_config_file_rules_colorspace_from_filepath(
    config_path, filepath, output_path
):
    """Get colorspace from file path wrapper.

    Python 2 wrapped console command

    Args:
        config_path (str): config file path string
        filepath (str): path string leading to file
        out_path (str): temp json file path string
        output_path (str): temp json file path string

    Example of use:
    > pyton.exe ./ocio_wrapper.py \
    > python.exe ./ocio_wrapper.py \
        colorspace get_config_file_rules_colorspace_from_filepath \
        --config_path=<path> --filepath=<path> --out_path=<path>
        --config_path <path> --filepath <path> --output_path <path>
    """
    json_path = Path(out_path)

    colorspace = _get_config_file_rules_colorspace_from_filepath(
        config_path, filepath)

    with open(json_path, "w") as f_:
        json.dump(colorspace, f_)

    print(f"Colorspace name is saved to '{json_path}'")
    _save_output_to_json_file(
        get_config_file_rules_colorspace_from_filepath(config_path, filepath),
        output_path
    )


def _get_config_file_rules_colorspace_from_filepath(config_path, filepath):
    """Return colorspace data found in v2 file rules.

    Args:
        config_path (str): path string leading to config.ocio
        filepath (str): path string leading to v2 file rules

    Raises:
        IOError: Input config does not exist.

    Returns:
        dict: aggregated available colorspaces
    """
    config_path = Path(config_path)

    if not config_path.is_file():
        raise IOError(
            f"Input path `{config_path}` should be `config.ocio` file")

    config = ocio.Config().CreateFromFile(str(config_path))

    # TODO: use `parseColorSpaceFromString` instead if ocio v1
    colorspace = config.getColorSpaceFromFilepath(str(filepath))

    return colorspace


def _get_display_view_colorspace_name(config_path, display, view):
    """Returns the colorspace attribute of the (display, view) pair.

    Args:
        config_path (str): path string leading to config.ocio
        display (str): display name e.g. "ACES"
        view (str): view name e.g. "sRGB"

    Raises:
        IOError: Input config does not exist.

    Returns:
        view color space name (str) e.g. "Output - sRGB"
    """

    config_path = Path(config_path)

    if not config_path.is_file():
        raise IOError("Input path should be `config.ocio` file")

    config = ocio.Config.CreateFromFile(str(config_path))
|
||||
colorspace = config.getDisplayViewColorSpaceName(display, view)
|
||||
|
||||
return colorspace
|
||||
|
||||
|
||||
@config.command(
|
||||
@main.command(
|
||||
name="get_display_view_colorspace_name",
|
||||
help=(
|
||||
"return default view colorspace name "
|
||||
"for the given display and view "
|
||||
"--path input arg is required"
|
||||
)
|
||||
)
|
||||
@click.option("--in_path", required=True,
|
||||
help="path where to read ocio config file",
|
||||
type=click.Path(exists=True))
|
||||
@click.option("--out_path", required=True,
|
||||
help="path where to write output json file",
|
||||
type=click.Path())
|
||||
@click.option("--display", required=True,
|
||||
help="display name",
|
||||
type=click.STRING)
|
||||
@click.option("--view", required=True,
|
||||
help="view name",
|
||||
type=click.STRING)
|
||||
def get_display_view_colorspace_name(in_path, out_path,
|
||||
display, view):
|
||||
"Default view colorspace name for the given display and view"
|
||||
))
|
||||
@click.option(
|
||||
"--config_path",
|
||||
required=True,
|
||||
help="path where to read ocio config file",
|
||||
type=click.Path(exists=True))
|
||||
@click.option(
|
||||
"--display",
|
||||
required=True,
|
||||
help="Display name",
|
||||
type=click.STRING)
|
||||
@click.option(
|
||||
"--view",
|
||||
required=True,
|
||||
help="view name",
|
||||
type=click.STRING)
|
||||
@click.option(
|
||||
"--output_path",
|
||||
required=True,
|
||||
help="path where to write output json file",
|
||||
type=click.Path())
|
||||
def _get_display_view_colorspace_name(
|
||||
config_path, display, view, output_path
|
||||
):
|
||||
"""Aggregate view colorspace name to file.
|
||||
|
||||
Wrapper command for processes without access to OpenColorIO
|
||||
|
||||
Args:
|
||||
in_path (str): config file path string
|
||||
out_path (str): temp json file path string
|
||||
config_path (str): config file path string
|
||||
output_path (str): temp json file path string
|
||||
display (str): display name e.g. "ACES"
|
||||
view (str): view name e.g. "sRGB"
|
||||
|
||||
Example of use:
|
||||
> pyton.exe ./ocio_wrapper.py config \
|
||||
get_display_view_colorspace_name --in_path=<path> \
|
||||
--out_path=<path> --display=<display> --view=<view>
|
||||
get_display_view_colorspace_name --config_path <path> \
|
||||
--output_path <path> --display <display> --view <view>
|
||||
"""
|
||||
_save_output_to_json_file(
|
||||
get_display_view_colorspace_name(config_path, display, view),
|
||||
output_path
|
||||
)
|
||||
|
||||
out_data = _get_display_view_colorspace_name(in_path,
|
||||
display,
|
||||
view)
|
||||
|
||||
with open(out_path, "w") as f:
|
||||
json.dump(out_data, f)
|
||||
|
||||
print(f"Display view colorspace saved to '{out_path}'")
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
if not has_compatible_ocio_package():
|
||||
raise RuntimeError("OpenColorIO is not available.")
|
||||
main()
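
The rewritten commands all funnel their result through
_save_output_to_json_file, which this hunk does not show. A minimal sketch of
what such a helper presumably does, assuming it only serializes the value and
reports the destination:

    def _save_output_to_json_file(output, output_path):
        # Assumed shape of the helper (not shown in this diff): dump any
        # JSON-serializable wrapper result to the temp file the caller reads.
        json_path = Path(output_path)
        with open(json_path, "w") as stream:
            json.dump(output, stream)
        print(f"Output data are saved to '{json_path}'")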
@@ -5,7 +5,7 @@ import ayon_api
import six

from ayon_core.style import get_default_entity_icon_color
from ayon_core.lib import CacheItem
from ayon_core.lib import CacheItem, NestedCacheItem

PROJECTS_MODEL_SENDER = "projects.model"


@@ -17,6 +17,49 @@ class AbstractHierarchyController:
    pass


class StatusItem:
    """Item representing status of project.

    Args:
        name (str): Status name ("Not ready").
        color (str): Status color in hex ("#434a56").
        short (str): Short status name ("NRD").
        icon (str): Icon name in MaterialIcons ("fiber_new").
        state (Literal["not_started", "in_progress", "done", "blocked"]):
            Status state.

    """
    def __init__(self, name, color, short, icon, state):
        self.name = name
        self.color = color
        self.short = short
        self.icon = icon
        self.state = state

    def to_data(self):
        return {
            "name": self.name,
            "color": self.color,
            "short": self.short,
            "icon": self.icon,
            "state": self.state,
        }

    @classmethod
    def from_data(cls, data):
        return cls(**data)

    @classmethod
    def from_project_item(cls, status_data):
        return cls(
            name=status_data["name"],
            color=status_data["color"],
            short=status_data["shortName"],
            icon=status_data["icon"],
            state=status_data["state"],
        )
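
Illustration only: round-tripping a server status through the new StatusItem.
The keys follow the docstring above; the values are made up.

    status_data = {
        "name": "Not ready",
        "color": "#434a56",
        "shortName": "NRD",
        "icon": "fiber_new",
        "state": "not_started",
    }
    status_item = StatusItem.from_project_item(status_data)
    assert StatusItem.from_data(status_item.to_data()).name == "Not ready"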


class ProjectItem:
    """Item representing folder entity on a server.


@@ -40,6 +83,23 @@ class ProjectItem:
        }
        self.icon = icon

    @classmethod
    def from_entity(cls, project_entity):
        """Creates folder item from entity.

        Args:
            project_entity (dict[str, Any]): Project entity.

        Returns:
            ProjectItem: Project item.

        """
        return cls(
            project_entity["name"],
            project_entity["active"],
            project_entity["library"],
        )

    def to_data(self):
        """Converts folder item to data.


@@ -79,7 +139,7 @@ def _get_project_items_from_entitiy(projects):
    """

    return [
        ProjectItem(project["name"], project["active"], project["library"])
        ProjectItem.from_entity(project)
        for project in projects
    ]


@@ -87,18 +147,29 @@ def _get_project_items_from_entitiy(projects):
class ProjectsModel(object):
    def __init__(self, controller):
        self._projects_cache = CacheItem(default_factory=list)
        self._project_items_by_name = {}
        self._projects_by_name = {}
        self._project_statuses_cache = NestedCacheItem(
            levels=1, default_factory=list
        )
        self._projects_by_name = NestedCacheItem(
            levels=1, default_factory=list
        )

        self._is_refreshing = False
        self._controller = controller

    def reset(self):
        self._projects_cache.reset()
        self._project_items_by_name = {}
        self._projects_by_name = {}
        self._project_statuses_cache.reset()
        self._projects_by_name.reset()

    def refresh(self):
        """Refresh project items.

        This method will requery list of ProjectItem returned by
        'get_project_items'.

        To reset all cached items use 'reset' method.
        """
        self._refresh_projects_cache()

    def get_project_items(self, sender):


@@ -117,12 +188,51 @@ class ProjectsModel(object):
        return self._projects_cache.get_data()

    def get_project_entity(self, project_name):
        if project_name not in self._projects_by_name:
        """Get project entity.

        Args:
            project_name (str): Project name.

        Returns:
            Union[dict[str, Any], None]: Project entity or None if project
                was not found by name.

        """
        project_cache = self._projects_by_name[project_name]
        if not project_cache.is_valid:
            entity = None
            if project_name:
                entity = ayon_api.get_project(project_name)
            self._projects_by_name[project_name] = entity
        return self._projects_by_name[project_name]
            project_cache.update_data(entity)
        return project_cache.get_data()

    def get_project_status_items(self, project_name, sender):
        """Get project status items.

        Args:
            project_name (str): Project name.
            sender (Union[str, None]): Name of sender who asked for items.

        Returns:
            list[StatusItem]: Status items for project.

        """
        statuses_cache = self._project_statuses_cache[project_name]
        if not statuses_cache.is_valid:
            with self._project_statuses_refresh_event_manager(
                sender, project_name
            ):
                project_entity = None
                if project_name:
                    project_entity = self.get_project_entity(project_name)
                statuses = []
                if project_entity:
                    statuses = [
                        StatusItem.from_project_item(status)
                        for status in project_entity["statuses"]
                    ]
                statuses_cache.update_data(statuses)
        return statuses_cache.get_data()
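
Both getters above follow the same NestedCacheItem idiom: index the cache by
key, refill only when the child cache is invalid, then return its data. A
minimal sketch of that pattern, assuming exactly the API used in this hunk:

    def get_cached(cache, key, fetch):
        # 'cache' is a NestedCacheItem(levels=1, ...); 'fetch' is the
        # expensive query. Child caches expose is_valid/update_data/get_data.
        child = cache[key]
        if not child.is_valid:
            child.update_data(fetch(key))
        return child.get_data()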

    @contextlib.contextmanager
    def _project_refresh_event_manager(self, sender):


@@ -143,6 +253,23 @@ class ProjectsModel(object):
            )
        self._is_refreshing = False

    @contextlib.contextmanager
    def _project_statuses_refresh_event_manager(self, sender, project_name):
        self._controller.emit_event(
            "projects.statuses.refresh.started",
            {"sender": sender, "project_name": project_name},
            PROJECTS_MODEL_SENDER
        )
        try:
            yield

        finally:
            self._controller.emit_event(
                "projects.statuses.refresh.finished",
                {"sender": sender, "project_name": project_name},
                PROJECTS_MODEL_SENDER
            )

    def _refresh_projects_cache(self, sender=None):
        if self._is_refreshing:
            return None
@@ -290,6 +290,34 @@ class ActionDelegate(QtWidgets.QStyledItemDelegate):
        painter.drawPixmap(extender_x, extender_y, pix)


class ActionsProxyModel(QtCore.QSortFilterProxyModel):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)

    def lessThan(self, left, right):
        # Sort by action order and then by label
        left_value = left.data(ACTION_SORT_ROLE)
        right_value = right.data(ACTION_SORT_ROLE)

        # Values are same -> use super sorting
        if left_value == right_value:
            # Default behavior is using DisplayRole
            return super().lessThan(left, right)

        # Validate 'None' values
        if right_value is None:
            return True
        if left_value is None:
            return False
        # Sort values and handle incompatible types
        try:
            return left_value < right_value
        except TypeError:
            return True
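
A pure-Python analogue of the comparison rules implemented above (for
illustration; the labels and sort values are hypothetical): None sort values
sink to the end, ties fall back to a case-insensitive label comparison.

    def action_sort_key(label, sort_value):
        # Mirrors lessThan: None last, then value, then label (case-folded).
        return (sort_value is None, sort_value or 0, label.lower())

    actions = [("B", 1), ("a", 1), ("c", None)]
    print(sorted(actions, key=lambda item: action_sort_key(*item)))
    # [('a', 1), ('B', 1), ('c', None)]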


class ActionsWidget(QtWidgets.QWidget):
    def __init__(self, controller, parent):
        super(ActionsWidget, self).__init__(parent)


@@ -316,10 +344,7 @@ class ActionsWidget(QtWidgets.QWidget):

        model = ActionsQtModel(controller)

        proxy_model = QtCore.QSortFilterProxyModel()
        proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
        proxy_model.setSortRole(ACTION_SORT_ROLE)

        proxy_model = ActionsProxyModel()
        proxy_model.setSourceModel(model)
        view.setModel(proxy_model)


@@ -359,7 +384,8 @@ class ActionsWidget(QtWidgets.QWidget):
    def _on_model_refresh(self):
        self._proxy_model.sort(0)
        # Force repaint all items
        self._view.update()
        viewport = self._view.viewport()
        viewport.update()

    def _on_animation(self):
        time_now = time.time()
@@ -114,6 +114,7 @@ class VersionItem:
        thumbnail_id (Union[str, None]): Thumbnail id.
        published_time (Union[str, None]): Published time in format
            '%Y%m%dT%H%M%SZ'.
        status (Union[str, None]): Status name.
        author (Union[str, None]): Author.
        frame_range (Union[str, None]): Frame range.
        duration (Union[int, None]): Duration.

@@ -132,6 +133,7 @@ class VersionItem:
        thumbnail_id,
        published_time,
        author,
        status,
        frame_range,
        duration,
        handles,

@@ -146,6 +148,7 @@ class VersionItem:
        self.is_hero = is_hero
        self.published_time = published_time
        self.author = author
        self.status = status
        self.frame_range = frame_range
        self.duration = duration
        self.handles = handles

@@ -185,6 +188,7 @@ class VersionItem:
        "is_hero": self.is_hero,
        "published_time": self.published_time,
        "author": self.author,
        "status": self.status,
        "frame_range": self.frame_range,
        "duration": self.duration,
        "handles": self.handles,

@@ -488,6 +492,27 @@ class FrontendLoaderController(_BaseLoaderController):

        pass

    @abstractmethod
    def get_project_status_items(self, project_name, sender=None):
        """Items for all projects available on server.

        Triggers event topics "projects.statuses.refresh.started" and
        "projects.statuses.refresh.finished" with data:
            {
                "sender": sender,
                "project_name": project_name
            }

        Args:
            project_name (Union[str, None]): Project name.
            sender (Optional[str]): Sender who requested the items.

        Returns:
            list[StatusItem]: List of status items.
        """

        pass

    @abstractmethod
    def get_product_items(self, project_name, folder_ids, sender=None):
        """Product items for folder ids.


@@ -180,6 +180,11 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
    def get_project_items(self, sender=None):
        return self._projects_model.get_project_items(sender)

    def get_project_status_items(self, project_name, sender=None):
        return self._projects_model.get_project_status_items(
            project_name, sender
        )

    def get_folder_items(self, project_name, sender=None):
        return self._hierarchy_model.get_folder_items(project_name, sender)
@@ -58,6 +58,7 @@ def version_item_from_entity(version):
        thumbnail_id=version["thumbnailId"],
        published_time=published_time,
        author=author,
        status=version["status"],
        frame_range=frame_range,
        duration=duration,
        handles=handles,

@@ -526,8 +527,11 @@ class ProductsModel:
        products = list(ayon_api.get_products(project_name, **kwargs))
        product_ids = {product["id"] for product in products}

        # Add 'status' to fields -> fixed in ayon-python-api 1.0.4
        fields = ayon_api.get_default_fields_for_type("version")
        fields.add("status")
        versions = ayon_api.get_versions(
            project_name, product_ids=product_ids
            project_name, product_ids=product_ids, fields=fields
        )

        return self._create_product_items(
@@ -6,6 +6,9 @@ from ayon_core.tools.utils.lib import format_version
from .products_model import (
    PRODUCT_ID_ROLE,
    VERSION_NAME_EDIT_ROLE,
    VERSION_STATUS_NAME_ROLE,
    VERSION_STATUS_SHORT_ROLE,
    VERSION_STATUS_COLOR_ROLE,
    VERSION_ID_ROLE,
    PRODUCT_IN_SCENE_ROLE,
    ACTIVE_SITE_ICON_ROLE,

@@ -104,7 +107,10 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
        style = QtWidgets.QApplication.style()

        style.drawControl(
            style.CE_ItemViewItem, option, painter, option.widget
            QtWidgets.QCommonStyle.CE_ItemViewItem,
            option,
            painter,
            option.widget
        )

        painter.save()

@@ -116,9 +122,14 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
        pen.setColor(fg_color)
        painter.setPen(pen)

        text_rect = style.subElementRect(style.SE_ItemViewItemText, option)
        text_rect = style.subElementRect(
            QtWidgets.QCommonStyle.SE_ItemViewItemText,
            option
        )
        text_margin = style.proxy().pixelMetric(
            style.PM_FocusFrameHMargin, option, option.widget
            QtWidgets.QCommonStyle.PM_FocusFrameHMargin,
            option,
            option.widget
        ) + 1

        painter.drawText(

@@ -194,6 +205,57 @@ class LoadedInSceneDelegate(QtWidgets.QStyledItemDelegate):
        option.palette.setBrush(QtGui.QPalette.Text, color)


class StatusDelegate(QtWidgets.QStyledItemDelegate):
    """Delegate showing status name and short name."""

    def paint(self, painter, option, index):
        if option.widget:
            style = option.widget.style()
        else:
            style = QtWidgets.QApplication.style()

        style.drawControl(
            QtWidgets.QCommonStyle.CE_ItemViewItem,
            option,
            painter,
            option.widget
        )

        painter.save()

        text_rect = style.subElementRect(
            QtWidgets.QCommonStyle.SE_ItemViewItemText,
            option
        )
        text_margin = style.proxy().pixelMetric(
            QtWidgets.QCommonStyle.PM_FocusFrameHMargin,
            option,
            option.widget
        ) + 1
        padded_text_rect = text_rect.adjusted(
            text_margin, 0, - text_margin, 0
        )

        fm = QtGui.QFontMetrics(option.font)
        text = index.data(VERSION_STATUS_NAME_ROLE)
        if padded_text_rect.width() < fm.width(text):
            text = index.data(VERSION_STATUS_SHORT_ROLE)

        status_color = index.data(VERSION_STATUS_COLOR_ROLE)
        fg_color = QtGui.QColor(status_color)
        pen = painter.pen()
        pen.setColor(fg_color)
        painter.setPen(pen)

        painter.drawText(
            padded_text_rect,
            option.displayAlignment,
            text
        )

        painter.restore()


class SiteSyncDelegate(QtWidgets.QStyledItemDelegate):
    """Paints icons and downloaded representation ration for both sites."""
@@ -22,18 +22,21 @@ VERSION_HERO_ROLE = QtCore.Qt.UserRole + 11
VERSION_NAME_ROLE = QtCore.Qt.UserRole + 12
VERSION_NAME_EDIT_ROLE = QtCore.Qt.UserRole + 13
VERSION_PUBLISH_TIME_ROLE = QtCore.Qt.UserRole + 14
VERSION_AUTHOR_ROLE = QtCore.Qt.UserRole + 15
VERSION_FRAME_RANGE_ROLE = QtCore.Qt.UserRole + 16
VERSION_DURATION_ROLE = QtCore.Qt.UserRole + 17
VERSION_HANDLES_ROLE = QtCore.Qt.UserRole + 18
VERSION_STEP_ROLE = QtCore.Qt.UserRole + 19
VERSION_AVAILABLE_ROLE = QtCore.Qt.UserRole + 20
VERSION_THUMBNAIL_ID_ROLE = QtCore.Qt.UserRole + 21
ACTIVE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 22
REMOTE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 23
REPRESENTATIONS_COUNT_ROLE = QtCore.Qt.UserRole + 24
SYNC_ACTIVE_SITE_AVAILABILITY = QtCore.Qt.UserRole + 25
SYNC_REMOTE_SITE_AVAILABILITY = QtCore.Qt.UserRole + 26
VERSION_STATUS_NAME_ROLE = QtCore.Qt.UserRole + 15
VERSION_STATUS_SHORT_ROLE = QtCore.Qt.UserRole + 16
VERSION_STATUS_COLOR_ROLE = QtCore.Qt.UserRole + 17
VERSION_AUTHOR_ROLE = QtCore.Qt.UserRole + 18
VERSION_FRAME_RANGE_ROLE = QtCore.Qt.UserRole + 19
VERSION_DURATION_ROLE = QtCore.Qt.UserRole + 20
VERSION_HANDLES_ROLE = QtCore.Qt.UserRole + 21
VERSION_STEP_ROLE = QtCore.Qt.UserRole + 22
VERSION_AVAILABLE_ROLE = QtCore.Qt.UserRole + 23
VERSION_THUMBNAIL_ID_ROLE = QtCore.Qt.UserRole + 24
ACTIVE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 25
REMOTE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 26
REPRESENTATIONS_COUNT_ROLE = QtCore.Qt.UserRole + 27
SYNC_ACTIVE_SITE_AVAILABILITY = QtCore.Qt.UserRole + 28
SYNC_REMOTE_SITE_AVAILABILITY = QtCore.Qt.UserRole + 29
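
Most of this hunk is hand-renumbering: inserting the three status roles shifts
every role defined after them. A sketch of an alternative that avoids that
churn (not how the module is actually written) would let enum.auto() do the
counting:

    import enum
    from qtpy import QtCore

    class ProductRole(enum.IntEnum):
        # Inserting a member renumbers the following ones automatically.
        VERSION_STATUS_NAME = QtCore.Qt.UserRole + 15
        VERSION_STATUS_SHORT = enum.auto()
        VERSION_STATUS_COLOR = enum.auto()
        VERSION_AUTHOR = enum.auto()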


class ProductsModel(QtGui.QStandardItemModel):

@@ -44,6 +47,7 @@ class ProductsModel(QtGui.QStandardItemModel):
        "Product type",
        "Folder",
        "Version",
        "Status",
        "Time",
        "Author",
        "Frames",

@@ -69,11 +73,35 @@ class ProductsModel(QtGui.QStandardItemModel):
        ]
    ]

    version_col = column_labels.index("Version")
    published_time_col = column_labels.index("Time")
    product_name_col = column_labels.index("Product name")
    product_type_col = column_labels.index("Product type")
    folders_label_col = column_labels.index("Folder")
    version_col = column_labels.index("Version")
    status_col = column_labels.index("Status")
    published_time_col = column_labels.index("Time")
    author_col = column_labels.index("Author")
    frame_range_col = column_labels.index("Frames")
    duration_col = column_labels.index("Duration")
    handles_col = column_labels.index("Handles")
    step_col = column_labels.index("Step")
    in_scene_col = column_labels.index("In scene")
    sitesync_avail_col = column_labels.index("Availability")
    _display_role_mapping = {
        product_name_col: QtCore.Qt.DisplayRole,
        product_type_col: PRODUCT_TYPE_ROLE,
        folders_label_col: FOLDER_LABEL_ROLE,
        version_col: VERSION_NAME_ROLE,
        status_col: VERSION_STATUS_NAME_ROLE,
        published_time_col: VERSION_PUBLISH_TIME_ROLE,
        author_col: VERSION_AUTHOR_ROLE,
        frame_range_col: VERSION_FRAME_RANGE_ROLE,
        duration_col: VERSION_DURATION_ROLE,
        handles_col: VERSION_HANDLES_ROLE,
        step_col: VERSION_STEP_ROLE,
        in_scene_col: PRODUCT_IN_SCENE_ROLE,
        sitesync_avail_col: VERSION_AVAILABLE_ROLE,
    }

    def __init__(self, controller):
        super(ProductsModel, self).__init__()

@@ -96,6 +124,7 @@ class ProductsModel(QtGui.QStandardItemModel):

        self._last_project_name = None
        self._last_folder_ids = []
        self._last_project_statuses = {}

    def get_product_item_indexes(self):
        return [

@@ -141,6 +170,15 @@ class ProductsModel(QtGui.QStandardItemModel):
        if not index.isValid():
            return None

        if role in (VERSION_STATUS_SHORT_ROLE, VERSION_STATUS_COLOR_ROLE):
            status_name = self.data(index, VERSION_STATUS_NAME_ROLE)
            status_item = self._last_project_statuses.get(status_name)
            if status_item is None:
                return ""
            if role == VERSION_STATUS_SHORT_ROLE:
                return status_item.short
            return status_item.color

        col = index.column()
        if col == 0:
            return super(ProductsModel, self).data(index, role)

@@ -168,29 +206,8 @@ class ProductsModel(QtGui.QStandardItemModel):
        if role == QtCore.Qt.DisplayRole:
            if not index.data(PRODUCT_ID_ROLE):
                return None
            if col == self.version_col:
                role = VERSION_NAME_ROLE
            elif col == 1:
                role = PRODUCT_TYPE_ROLE
            elif col == 2:
                role = FOLDER_LABEL_ROLE
            elif col == 4:
                role = VERSION_PUBLISH_TIME_ROLE
            elif col == 5:
                role = VERSION_AUTHOR_ROLE
            elif col == 6:
                role = VERSION_FRAME_RANGE_ROLE
            elif col == 7:
                role = VERSION_DURATION_ROLE
            elif col == 8:
                role = VERSION_HANDLES_ROLE
            elif col == 9:
                role = VERSION_STEP_ROLE
            elif col == 10:
                role = PRODUCT_IN_SCENE_ROLE
            elif col == 11:
                role = VERSION_AVAILABLE_ROLE
            else:
            role = self._display_role_mapping.get(col)
            if role is None:
                return None

        index = self.index(index.row(), 0, index.parent())

@@ -312,6 +329,7 @@ class ProductsModel(QtGui.QStandardItemModel):
            version_item.published_time, VERSION_PUBLISH_TIME_ROLE
        )
        model_item.setData(version_item.author, VERSION_AUTHOR_ROLE)
        model_item.setData(version_item.status, VERSION_STATUS_NAME_ROLE)
        model_item.setData(version_item.frame_range, VERSION_FRAME_RANGE_ROLE)
        model_item.setData(version_item.duration, VERSION_DURATION_ROLE)
        model_item.setData(version_item.handles, VERSION_HANDLES_ROLE)

@@ -393,6 +411,11 @@ class ProductsModel(QtGui.QStandardItemModel):

        self._last_project_name = project_name
        self._last_folder_ids = folder_ids
        status_items = self._controller.get_project_status_items(project_name)
        self._last_project_statuses = {
            status_item.name: status_item
            for status_item in status_items
        }

        active_site_icon_def = self._controller.get_active_site_icon_def(
            project_name
@@ -22,7 +22,8 @@ from .products_model import (
from .products_delegates import (
    VersionDelegate,
    LoadedInSceneDelegate,
    SiteSyncDelegate
    StatusDelegate,
    SiteSyncDelegate,
)
from .actions_utils import show_actions_menu


@@ -89,6 +90,7 @@ class ProductsWidget(QtWidgets.QWidget):
        90,  # Product type
        130,  # Folder label
        60,  # Version
        100,  # Status
        125,  # Time
        75,  # Author
        75,  # Frames

@@ -128,20 +130,19 @@ class ProductsWidget(QtWidgets.QWidget):
            products_view.setColumnWidth(idx, width)

        version_delegate = VersionDelegate()
        products_view.setItemDelegateForColumn(
            products_model.version_col, version_delegate)

        time_delegate = PrettyTimeDelegate()
        products_view.setItemDelegateForColumn(
            products_model.published_time_col, time_delegate)

        status_delegate = StatusDelegate()
        in_scene_delegate = LoadedInSceneDelegate()
        products_view.setItemDelegateForColumn(
            products_model.in_scene_col, in_scene_delegate)

        sitesync_delegate = SiteSyncDelegate()
        products_view.setItemDelegateForColumn(
            products_model.sitesync_avail_col, sitesync_delegate)

        for col, delegate in (
            (products_model.version_col, version_delegate),
            (products_model.published_time_col, time_delegate),
            (products_model.status_col, status_delegate),
            (products_model.in_scene_col, in_scene_delegate),
            (products_model.sitesync_avail_col, sitesync_delegate),
        ):
            products_view.setItemDelegateForColumn(col, delegate)

        main_layout = QtWidgets.QHBoxLayout(self)
        main_layout.setContentsMargins(0, 0, 0, 0)

@@ -175,6 +176,7 @@ class ProductsWidget(QtWidgets.QWidget):

        self._version_delegate = version_delegate
        self._time_delegate = time_delegate
        self._status_delegate = status_delegate
        self._in_scene_delegate = in_scene_delegate
        self._sitesync_delegate = sitesync_delegate
@@ -335,9 +335,7 @@ class LoaderWindow(QtWidgets.QWidget):

    def closeEvent(self, event):
        super(LoaderWindow, self).closeEvent(event)
        # Deselect project so current context will be selected
        # on next 'showEvent'
        self._controller.set_selected_project(None)

        self._reset_on_show = True

    def keyPressEvent(self, event):
@@ -723,7 +723,6 @@ class ProjectPushItemProcess:
        dst_project_name = self._item.dst_project_name
        dst_folder_id = self._item.dst_folder_id
        dst_task_name = self._item.dst_task_name
        dst_task_name_low = dst_task_name.lower()
        new_folder_name = self._item.new_folder_name
        if not dst_folder_id and not new_folder_name:
            self._status.set_failed(

@@ -765,7 +764,7 @@ class ProjectPushItemProcess:
                dst_project_name, folder_ids=[folder_entity["id"]]
            )
        }
        task_info = folder_tasks.get(dst_task_name_low)
        task_info = folder_tasks.get(dst_task_name.lower())
        if not task_info:
            self._status.set_failed(
                f"Could find task with name \"{dst_task_name}\""
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON core addon version."""
__version__ = "0.3.1-dev.1"
__version__ = "0.3.3-dev.1"

@@ -16,7 +16,7 @@ aiohttp_json_rpc = "*" # TVPaint server
aiohttp-middlewares = "^2.0.0"
wsrpc_aiohttp = "^3.1.1" # websocket server
Click = "^8"
OpenTimelineIO = "0.14.1"
OpenTimelineIO = "0.16.0"
opencolorio = "2.2.1"
Pillow = "9.5.0"
pynput = "^1.7.2" # Timers manager - TODO remove

@@ -1,11 +1,12 @@
name = "core"
title = "Core"
version = "0.3.1-dev.1"
version = "0.3.3-dev.1"

client_dir = "ayon_core"

plugin_for = ["ayon_server"]
requires = [
    "~ayon_server-1.0.3+<2.0.0",
]

ayon_server_version = ">=1.0.3,<2.0.0"
ayon_launcher_version = ">=1.0.2"
ayon_required_addons = {}
ayon_compatible_addons = {}
@@ -1,3 +1,5 @@
from typing import Any

from ayon_server.addons import BaseServerAddon

from .settings import CoreSettings, DEFAULT_VALUES

@@ -9,3 +11,53 @@ class CoreAddon(BaseServerAddon):
    async def get_default_settings(self):
        settings_model_cls = self.get_settings_model()
        return settings_model_cls(**DEFAULT_VALUES)

    async def convert_settings_overrides(
        self,
        source_version: str,
        overrides: dict[str, Any],
    ) -> dict[str, Any]:
        self._convert_imagio_configs_0_3_1(overrides)
        # Use super conversion
        return await super().convert_settings_overrides(
            source_version, overrides
        )

    def _convert_imagio_configs_0_3_1(self, overrides):
        """Imageio config settings did change to profiles since 0.3.1. ."""
        imageio_overrides = overrides.get("imageio") or {}
        if (
            "ocio_config" not in imageio_overrides
            or "filepath" not in imageio_overrides["ocio_config"]
        ):
            return

        ocio_config = imageio_overrides.pop("ocio_config")

        filepath = ocio_config["filepath"]
        if not filepath:
            return
        first_filepath = filepath[0]
        ocio_config_profiles = imageio_overrides.setdefault(
            "ocio_config_profiles", []
        )
        base_value = {
            "type": "builtin_path",
            "product_name": "",
            "host_names": [],
            "task_names": [],
            "task_types": [],
            "custom_path": "",
            "builtin_path": "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio"
        }
        if first_filepath in (
            "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio",
            "{BUILTIN_OCIO_ROOT}/nuke-default/config.ocio",
        ):
            base_value["type"] = "builtin_path"
            base_value["builtin_path"] = first_filepath
        else:
            base_value["type"] = "custom_path"
            base_value["custom_path"] = first_filepath

        ocio_config_profiles.append(base_value)
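
Worked example with an illustrative path: an override saved by core 0.3.1 as

    {"imageio": {"ocio_config": {"filepath": ["/studio/custom.ocio"]}}}

comes out of _convert_imagio_configs_0_3_1 with "imageio" holding

    {"ocio_config_profiles": [{
        "type": "custom_path",
        "custom_path": "/studio/custom.ocio",
        "builtin_path": "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio",
        "product_name": "", "host_names": [],
        "task_names": [], "task_types": []}]}

Only the first filepath survives; a recognized built-in path would instead
keep type "builtin_path".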
@@ -54,9 +54,67 @@ class CoreImageIOFileRulesModel(BaseSettingsModel):
        return value


class CoreImageIOConfigModel(BaseSettingsModel):
    filepath: list[str] = SettingsField(
        default_factory=list, title="Config path"
def _ocio_config_profile_types():
    return [
        {"value": "builtin_path", "label": "AYON built-in OCIO config"},
        {"value": "custom_path", "label": "Path to OCIO config"},
        {"value": "product_name", "label": "Published product"},
    ]


def _ocio_built_in_paths():
    return [
        {
            "value": "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio",
            "label": "ACES 1.2",
            "description": "Aces 1.2 OCIO config file."
        },
        {
            "value": "{BUILTIN_OCIO_ROOT}/nuke-default/config.ocio",
            "label": "Nuke default",
        },
    ]


class CoreImageIOConfigProfilesModel(BaseSettingsModel):
    _layout = "expanded"
    host_names: list[str] = SettingsField(
        default_factory=list,
        title="Host names"
    )
    task_types: list[str] = SettingsField(
        default_factory=list,
        title="Task types",
        enum_resolver=task_types_enum
    )
    task_names: list[str] = SettingsField(
        default_factory=list,
        title="Task names"
    )
    type: str = SettingsField(
        title="Profile type",
        enum_resolver=_ocio_config_profile_types,
        conditionalEnum=True,
        default="builtin_path",
        section="---",
    )
    builtin_path: str = SettingsField(
        "ACES 1.2",
        title="Built-in OCIO config",
        enum_resolver=_ocio_built_in_paths,
    )
    custom_path: str = SettingsField(
        "",
        title="OCIO config path",
        description="Path to OCIO config. Anatomy formatting is supported.",
    )
    product_name: str = SettingsField(
        "",
        title="Product name",
        description=(
            "Published product name to get OCIO config from. "
            "Partial match is supported."
        ),
    )


@@ -65,9 +123,8 @@ class CoreImageIOBaseModel(BaseSettingsModel):
        False,
        title="Enable Color Management"
    )
    ocio_config: CoreImageIOConfigModel = SettingsField(
        default_factory=CoreImageIOConfigModel,
        title="OCIO config"
    ocio_config_profiles: list[CoreImageIOConfigProfilesModel] = SettingsField(
        default_factory=list, title="OCIO config profiles"
    )
    file_rules: CoreImageIOFileRulesModel = SettingsField(
        default_factory=CoreImageIOFileRulesModel,

@@ -186,12 +243,17 @@ class CoreSettings(BaseSettingsModel):
DEFAULT_VALUES = {
    "imageio": {
        "activate_global_color_management": False,
        "ocio_config": {
            "filepath": [
                "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio",
                "{BUILTIN_OCIO_ROOT}/nuke-default/config.ocio"
            ]
        },
        "ocio_config_profiles": [
            {
                "host_names": [],
                "task_types": [],
                "task_names": [],
                "type": "builtin_path",
                "builtin_path": "{BUILTIN_OCIO_ROOT}/aces_1.2/config.ocio",
                "custom_path": "",
                "product_name": "",
            }
        ],
        "file_rules": {
            "activate_global_file_rules": False,
            "rules": [

@@ -199,42 +261,57 @@ DEFAULT_VALUES = {
                "name": "example",
                "pattern": ".*(beauty).*",
                "colorspace": "ACES - ACEScg",
                "ext": "exr"
                "ext": "exr",
            }
        ]
    }
            ],
        },
    },
    "studio_name": "",
    "studio_code": "",
    "environments": "{\n\"STUDIO_SW\": {\n  \"darwin\": \"/mnt/REPO_SW\",\n  \"linux\": \"/mnt/REPO_SW\",\n  \"windows\": \"P:/REPO_SW\"\n }\n}",
    "environments": json.dumps(
        {
            "STUDIO_SW": {
                "darwin": "/mnt/REPO_SW",
                "linux": "/mnt/REPO_SW",
                "windows": "P:/REPO_SW"
            }
        },
        indent=4
    ),
    "tools": DEFAULT_TOOLS_VALUES,
    "version_start_category": {
        "profiles": []
    },
    "publish": DEFAULT_PUBLISH_VALUES,
    "project_folder_structure": json.dumps({
        "__project_root__": {
            "prod": {},
            "resources": {
                "footage": {
                    "plates": {},
                    "offline": {}
    "project_folder_structure": json.dumps(
        {
            "__project_root__": {
                "prod": {},
                "resources": {
                    "footage": {
                        "plates": {},
                        "offline": {}
                    },
                    "audio": {},
                    "art_dept": {}
                },
                "audio": {},
                "art_dept": {}
            },
            "editorial": {},
            "assets": {
                "characters": {},
                "locations": {}
            },
            "shots": {}
        }
    }, indent=4),
                "editorial": {},
                "assets": {
                    "characters": {},
                    "locations": {}
                },
                "shots": {}
            }
        },
        indent=4
    ),
    "project_plugins": {
        "windows": [],
        "darwin": [],
        "linux": []
    },
    "project_environments": "{}"
    "project_environments": json.dumps(
        {},
        indent=4
    )
}
@@ -59,6 +59,33 @@ class CollectFramesFixDefModel(BaseSettingsModel):
    )


class ValidateOutdatedContainersProfile(BaseSettingsModel):
    _layout = "expanded"
    # Filtering
    host_names: list[str] = SettingsField(
        default_factory=list,
        title="Host names"
    )
    # Profile values
    enabled: bool = SettingsField(True, title="Enabled")
    optional: bool = SettingsField(True, title="Optional")
    active: bool = SettingsField(True, title="Active")


class ValidateOutdatedContainersModel(BaseSettingsModel):
    """Validate if Publishing intent was selected.

    It is possible to disable validation for specific publishing context
    with profiles.
    """

    _isGroup = True
    plugin_state_profiles: list[ValidateOutdatedContainersProfile] = SettingsField(
        default_factory=list,
        title="Plugin enable state profiles",
    )
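
At publish time such a profile would typically be matched against the current
host and applied to the plugin. A sketch of that consumption side, assuming
filter_profiles from ayon_core.lib (the actual wiring in the pyblish plugin is
not part of this hunk):

    from ayon_core.lib import filter_profiles

    def apply_plugin_state_profile(plugin, profiles, host_name):
        # Pick the first profile whose host_names filter matches the host.
        profile = filter_profiles(profiles, {"host_names": host_name})
        if profile:
            plugin.enabled = profile["enabled"]
            plugin.optional = profile["optional"]
            plugin.active = profile["active"]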


class ValidateIntentProfile(BaseSettingsModel):
    _layout = "expanded"
    hosts: list[str] = SettingsField(default_factory=list, title="Host names")

@@ -770,6 +797,10 @@ class PublishPuginsModel(BaseSettingsModel):
        default_factory=ValidateBaseModel,
        title="Validate Version"
    )
    ValidateOutdatedContainers: ValidateOutdatedContainersModel = SettingsField(
        default_factory=ValidateOutdatedContainersModel,
        title="Validate Containers"
    )
    ValidateIntent: ValidateIntentModel = SettingsField(
        default_factory=ValidateIntentModel,
        title="Validate Intent"

@@ -855,6 +886,25 @@ DEFAULT_PUBLISH_VALUES = {
        "optional": False,
        "active": True
    },
    "ValidateOutdatedContainers": {
        "plugin_state_profiles": [
            {
                # Default host names are based on original
                # filter of ValidateContainer pyblish plugin
                "host_names": [
                    "maya",
                    "houdini",
                    "nuke",
                    "harmony",
                    "photoshop",
                    "aftereffects"
                ],
                "enabled": True,
                "optional": True,
                "active": True
            }
        ]
    },
    "ValidateIntent": {
        "enabled": False,
        "profiles": []
@@ -1,3 +1,3 @@
name = "aftereffects"
title = "AfterEffects"
version = "0.1.3"
version = "0.1.4"

@@ -22,12 +22,6 @@ class ValidateSceneSettingsModel(BaseSettingsModel):
    )


class ValidateContainersModel(BaseSettingsModel):
    enabled: bool = SettingsField(True, title="Enabled")
    optional: bool = SettingsField(True, title="Optional")
    active: bool = SettingsField(True, title="Active")


class AfterEffectsPublishPlugins(BaseSettingsModel):
    CollectReview: CollectReviewPluginModel = SettingsField(
        default_factory=CollectReviewPluginModel,

@@ -37,10 +31,6 @@ class AfterEffectsPublishPlugins(BaseSettingsModel):
        default_factory=ValidateSceneSettingsModel,
        title="Validate Scene Settings",
    )
    ValidateContainers: ValidateContainersModel = SettingsField(
        default_factory=ValidateContainersModel,
        title="Validate Containers",
    )


AE_PUBLISH_PLUGINS_DEFAULTS = {

@@ -58,9 +48,4 @@ AE_PUBLISH_PLUGINS_DEFAULTS = {
            ".*"
        ]
    },
    "ValidateContainers": {
        "enabled": True,
        "optional": True,
        "active": True,
    }
}
@@ -1,3 +1,3 @@
name = "blender"
title = "Blender"
version = "0.1.8"
version = "0.1.9"

@@ -151,6 +151,10 @@ class PublishPluginsModel(BaseSettingsModel):
        default_factory=ExtractPlayblastModel,
        title="Extract Playblast"
    )
    ExtractModelUSD: ValidatePluginModel = SettingsField(
        default_factory=ValidatePluginModel,
        title="Extract Model USD"
    )


DEFAULT_BLENDER_PUBLISH_SETTINGS = {

@@ -348,5 +352,10 @@ DEFAULT_BLENDER_PUBLISH_SETTINGS = {
        },
        indent=4
    )
    },
    "ExtractModelUSD": {
        "enabled": True,
        "optional": True,
        "active": True
    }
}
server_addon/clockify/client/ayon_clockify/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
from .addon import ClockifyAddon

__all__ = (
    "ClockifyAddon",
)

@@ -2,12 +2,12 @@ import os
import threading
import time

from ayon_core.modules import AYONAddon, ITrayModule, IPluginPaths
from ayon_core.addon import AYONAddon, ITrayAddon, IPluginPaths

from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH


class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
class ClockifyAddon(AYONAddon, ITrayAddon, IPluginPaths):
    name = "clockify"

    def initialize(self, studio_settings):

@@ -31,7 +31,7 @@ class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
        # TimersManager attributes
        # - set `timers_manager_connector` only in `tray_init`
        self.timers_manager_connector = None
        self._timers_manager_module = None
        self._timer_manager_addon = None

    @property
    def clockify_api(self):

@@ -87,7 +87,7 @@ class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
        return {"actions": [actions_path]}

    def get_ftrack_event_handler_paths(self):
        """Function for Ftrack module to add ftrack event handler paths."""
        """Function for ftrack addon to add ftrack event handler paths."""
        return {
            "user": [CLOCKIFY_FTRACK_USER_PATH],
            "server": [CLOCKIFY_FTRACK_SERVER_PATH],

@@ -206,19 +206,19 @@ class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
        self.action_stop_timer.setVisible(self.bool_timer_run)

    # --- TimersManager connection methods ---
    def register_timers_manager(self, timer_manager_module):
    def register_timers_manager(self, timer_manager_addon):
        """Store TimersManager for future use."""
        self._timers_manager_module = timer_manager_module
        self._timer_manager_addon = timer_manager_addon

    def timer_started(self, data):
        """Tell TimersManager that timer started."""
        if self._timers_manager_module is not None:
            self._timers_manager_module.timer_started(self.id, data)
        if self._timer_manager_addon is not None:
            self._timer_manager_addon.timer_started(self.id, data)

    def timer_stopped(self):
        """Tell TimersManager that timer stopped."""
        if self._timers_manager_module is not None:
            self._timers_manager_module.timer_stopped(self.id)
        if self._timer_manager_addon is not None:
            self._timer_manager_addon.timer_stopped(self.id)

    def stop_timer(self):
        """Called from TimersManager to stop timer."""

@@ -1,15 +1,17 @@
import os
import json
import datetime

import requests

from ayon_core.lib.local_settings import AYONSecureRegistry
from ayon_core.lib import Logger

from .constants import (
    CLOCKIFY_ENDPOINT,
    ADMIN_PERMISSION_NAMES,
)

from ayon_core.lib.local_settings import AYONSecureRegistry
from ayon_core.lib import Logger


class ClockifyAPI:
    log = Logger.get_logger(__name__)
Some files were not shown because too many files have changed in this diff.