Merge branch 'develop' into enhancement/nuke_collect_deadline_pools

This commit is contained in:
Jakub Ježek 2024-05-24 15:42:56 +02:00 committed by GitHub
commit 64cc69ecc0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
140 changed files with 1151 additions and 340 deletions

View file

@ -51,6 +51,8 @@ IGNORED_MODULES_IN_AYON = set()
# - this is used to log the missing addon
MOVED_ADDON_MILESTONE_VERSIONS = {
"applications": VersionInfo(0, 2, 0),
"clockify": VersionInfo(0, 2, 0),
"tvpaint": VersionInfo(0, 2, 0),
}
# Inherit from `object` for Python 2 hosts

View file

@ -365,3 +365,62 @@ def maintained_time():
yield
finally:
bpy.context.scene.frame_current = current_time
def get_all_parents(obj):
    """Collect every ancestor of an object, nearest parent first.

    Arguments:
        obj (bpy.types.Object): Object to get all parents for.

    Returns:
        List[bpy.types.Object]: All parents of object, ordered from the
            direct parent up to the top-most ancestor.
    """
    parents = []
    current = obj.parent
    while current:
        parents.append(current)
        current = current.parent
    return parents
def get_highest_root(objects):
    """Get the highest object (the least parents) among the objects.

    If multiple objects have the same amount of parents (or no parents) the
    first object found in the input iterable will be returned.

    Note that this will *not* return objects outside of the input list, as
    such it will not return the root of node from a child node. It is purely
    intended to find the highest object among a list of objects. To instead
    get the root from one object use, e.g. `get_all_parents(obj)[-1]`

    Arguments:
        objects (List[bpy.types.Object]): Objects to find the highest root in.

    Returns:
        Optional[bpy.types.Object]: First highest root found or None if no
            `bpy.types.Object` found in input list.
    """
    names_in_input = {obj.name_full for obj in objects}
    first_obj_by_depth = {}
    for node in objects:
        if not isinstance(node, bpy.types.Object):
            continue
        # Only count parents that are part of the input list themselves.
        depth = sum(
            1 for parent in get_all_parents(node)
            if parent.name_full in names_in_input
        )
        if depth == 0:
            # A node without parents must be a highest root
            return node
        # Keep only the *first* object seen for each parent count.
        first_obj_by_depth.setdefault(depth, node)
    if not first_obj_by_depth:
        return None
    return first_obj_by_depth[min(first_obj_by_depth)]

View file

@ -26,7 +26,8 @@ from .ops import (
)
from .lib import imprint
VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"]
VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx",
".usd", ".usdc", ".usda"]
def prepare_scene_name(

View file

@ -0,0 +1,30 @@
"""Create a USD Export."""
from ayon_core.hosts.blender.api import plugin, lib
class CreateUSD(plugin.BaseCreator):
    """Create USD Export.

    Creator plugin that builds a publish instance (a Blender collection)
    used to export the scene data as USD.
    """

    # Unique creator identifier and UI metadata.
    identifier = "io.openpype.creators.blender.usd"
    name = "usdMain"
    label = "USD"
    product_type = "usd"
    icon = "gears"

    def create(
        self, product_name: str, instance_data: dict, pre_create_data: dict
    ):
        """Create the instance collection, optionally linking the selection.

        Args:
            product_name (str): Name of the product being created.
            instance_data (dict): Instance metadata passed to the parent
                creator.
            pre_create_data (dict): Pre-create options; only
                "use_selection" is read here.

        Returns:
            The collection created by the parent creator.
        """
        # Run parent create method
        collection = super().create(
            product_name, instance_data, pre_create_data
        )
        if pre_create_data.get("use_selection"):
            objects = lib.get_selection()
            for obj in objects:
                collection.objects.link(obj)
                # Extending the list while iterating is deliberate here:
                # children of EMPTY objects are appended so the loop also
                # links them into the collection.
                if obj.type == 'EMPTY':
                    objects.extend(obj.children)
        return collection

View file

@ -26,10 +26,10 @@ class CacheModelLoader(plugin.AssetLoader):
Note:
At least for now it only supports Alembic files.
"""
product_types = {"model", "pointcache", "animation"}
representations = {"abc"}
product_types = {"model", "pointcache", "animation", "usd"}
representations = {"abc", "usd"}
label = "Load Alembic"
label = "Load Cache"
icon = "code-fork"
color = "orange"
@ -53,10 +53,21 @@ class CacheModelLoader(plugin.AssetLoader):
plugin.deselect_all()
relative = bpy.context.preferences.filepaths.use_relative_paths
bpy.ops.wm.alembic_import(
filepath=libpath,
relative_path=relative
)
if any(libpath.lower().endswith(ext)
for ext in [".usd", ".usda", ".usdc"]):
# USD
bpy.ops.wm.usd_import(
filepath=libpath,
relative_path=relative
)
else:
# Alembic
bpy.ops.wm.alembic_import(
filepath=libpath,
relative_path=relative
)
imported = lib.get_selection()

View file

@ -12,7 +12,7 @@ class CollectBlenderInstanceData(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder
hosts = ["blender"]
families = ["model", "pointcache", "animation", "rig", "camera", "layout",
"blendScene"]
"blendScene", "usd"]
label = "Collect Instance"
def process(self, instance):

View file

@ -0,0 +1,90 @@
import os
import bpy
from ayon_core.pipeline import publish
from ayon_core.hosts.blender.api import plugin, lib
class ExtractUSD(publish.Extractor):
    """Extract as USD.

    Selects the instance's objects, exports them with Blender's
    ``wm.usd_export`` operator into the staging directory and registers
    the resulting file as a 'usd' representation on the instance.
    """

    label = "Extract USD"
    hosts = ["blender"]
    families = ["usd"]

    def process(self, instance):
        # Ignore runtime instances (e.g. USD layers)
        # TODO: This is better done via more specific `families`
        if not instance.data.get("transientData", {}).get("instance_node"):
            return
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = f"{instance.name}.usd"
        filepath = os.path.join(stagingdir, filename)
        # Perform extraction
        self.log.debug("Performing extraction..")
        # Select all members to "export selected"
        plugin.deselect_all()
        selected = []
        for obj in instance:
            # Only bpy objects can be selected; other members are skipped.
            if isinstance(obj, bpy.types.Object):
                obj.select_set(True)
                selected.append(obj)
        # The export needs an active object; use the top-most member.
        root = lib.get_highest_root(objects=instance[:])
        if not root:
            instance_node = instance.data["transientData"]["instance_node"]
            raise publish.KnownPublishError(
                f"No root object found in instance: {instance_node.name}"
            )
        self.log.debug(f"Exporting using active root: {root.name}")
        context = plugin.create_blender_context(
            active=root, selected=selected)
        # Export USD
        with bpy.context.temp_override(**context):
            bpy.ops.wm.usd_export(
                filepath=filepath,
                selected_objects_only=True,
                export_textures=False,
                relative_paths=False,
                export_animation=False,
                export_hair=False,
                export_uvmaps=True,
                # TODO: add for new version of Blender (4+?)
                # export_mesh_colors=True,
                export_normals=True,
                export_materials=True,
                use_instancing=True
            )
        # Restore a clean selection state after the export.
        plugin.deselect_all()
        # Add representation
        representation = {
            'name': 'usd',
            'ext': 'usd',
            'files': filename,
            "stagingDir": stagingdir,
        }
        instance.data.setdefault("representations", []).append(representation)
        self.log.debug("Extracted instance '%s' to: %s",
                       instance.name, representation)
class ExtractModelUSD(ExtractUSD):
    """Extract model as USD.

    Same extraction as `ExtractUSD` but targets the 'model' family and
    is optional (can be toggled via project settings).
    """

    label = "Extract USD (Model)"
    hosts = ["blender"]
    families = ["model"]

    # Driven by settings
    optional = True

View file

@ -58,3 +58,55 @@ class SelectInvalidAction(pyblish.api.Action):
self.log.info(
"Selecting invalid tools: %s" % ", ".join(sorted(names))
)
class SelectToolAction(pyblish.api.Action):
    """Select invalid output tool in Fusion when plug-in failed.

    Collects the 'tool' stored on each errored instance and selects those
    tools in the Fusion flow view so the user can inspect them.
    """

    label = "Select saver"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "search"  # Icon from Awesome Icon

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(
            context,
            plugin=plugin,
        )
        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes..")
        tools = []
        for instance in errored_instances:
            tool = instance.data.get("tool")
            if tool is not None:
                tools.append(tool)
            else:
                # Instance errored but stored no tool; nothing to select.
                self.log.warning(
                    "Plug-in returned to be invalid, "
                    f"but has no saver for instance {instance.name}."
                )
        if not tools:
            # Assume relevant comp is current comp and clear selection
            self.log.info("No invalid tools found.")
            comp = get_current_comp()
            flow = comp.CurrentFrame.FlowView
            flow.Select()  # No args equals clearing selection
            return
        # Assume a single comp
        first_tool = tools[0]
        comp = first_tool.Comp()
        flow = comp.CurrentFrame.FlowView
        flow.Select()  # No args equals clearing selection
        names = set()
        for tool in tools:
            flow.Select(tool, True)
            # Make the last tool the active one so it gets focus.
            comp.SetActiveTool(tool)
            names.add(tool.Name)
        self.log.info(
            "Selecting invalid tools: %s" % ", ".join(sorted(names))
        )

View file

@ -52,7 +52,7 @@ class CollectFusionRender(
if product_type not in ["render", "image"]:
continue
task_name = context.data["task"]
task_name = inst.data["task"]
tool = inst.data["transientData"]["tool"]
instance_families = inst.data.get("families", [])

View file

@ -0,0 +1,80 @@
# -*- coding: utf-8 -*-
"""Validate if instance context is the same as publish context."""
import pyblish.api
from ayon_core.hosts.fusion.api.action import SelectToolAction
from ayon_core.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
PublishValidationError,
OptionalPyblishPluginMixin
)
class ValidateInstanceInContextFusion(pyblish.api.InstancePlugin,
                                      OptionalPyblishPluginMixin):
    """Validator to check if instance context matches context of publish.

    When working in per-shot style you always publish data in context of
    current asset (shot). This validator checks if this is so. It is optional
    so it can be disabled when needed.
    """
    # Similar to maya and houdini-equivalent `ValidateInstanceInContext`

    order = ValidateContentsOrder
    label = "Instance in same Context"
    optional = True
    hosts = ["fusion"]
    actions = [SelectToolAction, RepairAction]

    def process(self, instance):
        """Raise when the instance's (folder, task) differs from the
        publish context's (folder, task)."""
        if not self.is_active(instance.data):
            return

        instance_context = self.get_context(instance.data)
        context = self.get_context(instance.context.data)
        if instance_context != context:
            context_label = "{} > {}".format(*context)
            instance_label = "{} > {}".format(*instance_context)
            raise PublishValidationError(
                message=(
                    "Instance '{}' publishes to different asset than current "
                    "context: {}. Current context: {}".format(
                        instance.name, instance_label, context_label
                    )
                ),
                description=(
                    "## Publishing to a different asset\n"
                    "There are publish instances present which are publishing "
                    "into a different asset than your current context.\n\n"
                    "Usually this is not what you want but there can be cases "
                    "where you might want to publish into another asset or "
                    "shot. If that's the case you can disable the validation "
                    "on the instance to ignore it."
                )
            )

    @classmethod
    def repair(cls, instance):
        """Reset the instance's folder path and task to the current context.

        Raises:
            RuntimeError: When no CreatedInstance matches the pyblish
                instance's id (nothing to repair).
        """
        create_context = instance.context.data["create_context"]
        instance_id = instance.data.get("instance_id")
        created_instance = create_context.get_instance_by_id(
            instance_id
        )
        if created_instance is None:
            # Fixed: the original message was missing the closing quote
            # around the id.
            raise RuntimeError(
                f"No CreatedInstances found with id '{instance_id}' "
                f"in {create_context.instances_by_id}"
            )
        context_asset, context_task = cls.get_context(instance.context.data)
        created_instance["folderPath"] = context_asset
        created_instance["task"] = context_task
        create_context.save_changes()

    @staticmethod
    def get_context(data):
        """Return asset, task from publishing context data"""
        return data["folderPath"], data["task"]

View file

@ -0,0 +1,141 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating Model product type.
Note:
    Currently, this creator plugin is the same as 'create_pointcache.py',
    except that the product type is renamed to 'model'.
    Its purpose is to support Maya
    (loading/publishing models between Maya and Houdini).
    Supporting multiple representations is planned for the future.
"""
from ayon_core.hosts.houdini.api import plugin
from ayon_core.lib import BoolDef
import hou
class CreateModel(plugin.HoudiniCreator):
    """Create Model.

    Creates an Alembic ROP configured to export a single frame, which is
    used as the 'model' product type.
    """

    identifier = "io.openpype.creators.houdini.model"
    label = "Model"
    product_type = "model"
    icon = "cube"

    def create(self, product_name, instance_data, pre_create_data):
        """Create the Alembic ROP instance node and configure its parms.

        Args:
            product_name (str): Name of the product to create.
            instance_data (dict): Instance metadata; "node_type" is forced
                to "alembic".
            pre_create_data (dict): Pre-create options; "farm" is copied
                into creator attributes.
        """
        instance_data.pop("active", None)
        instance_data.update({"node_type": "alembic"})
        creator_attributes = instance_data.setdefault(
            "creator_attributes", dict())
        creator_attributes["farm"] = pre_create_data["farm"]
        instance = super(CreateModel, self).create(
            product_name,
            instance_data,
            pre_create_data)
        instance_node = hou.node(instance.get("instance_node"))
        parms = {
            "use_sop_path": True,
            "build_from_path": True,
            "path_attrib": "path",
            "prim_to_detail_pattern": "cbId",
            "format": 2,
            "facesets": 0,
            "filename": hou.text.expandString(
                "$HIP/pyblish/{}.abc".format(product_name))
        }
        if self.selected_nodes:
            selected_node = self.selected_nodes[0]
            # Although Houdini allows ObjNode path on `sop_path` for the
            # the ROP node we prefer it set to the SopNode path explicitly
            # Allow sop level paths (e.g. /obj/geo1/box1)
            if isinstance(selected_node, hou.SopNode):
                parms["sop_path"] = selected_node.path()
                self.log.debug(
                    "Valid SopNode selection, 'SOP Path' in ROP will be set to '%s'."
                    % selected_node.path()
                )
            # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
            # but do not allow other object level nodes types like cameras, etc.
            elif isinstance(selected_node, hou.ObjNode) and \
                    selected_node.type().name() in ["geo"]:
                # get the output node with the minimum
                # 'outputidx' or the node with display flag
                sop_path = self.get_obj_output(selected_node)
                if sop_path:
                    parms["sop_path"] = sop_path.path()
                    self.log.debug(
                        "Valid ObjNode selection, 'SOP Path' in ROP will be set to "
                        "the child path '%s'."
                        % sop_path.path()
                    )
            if not parms.get("sop_path", None):
                self.log.debug(
                    "Selection isn't valid. 'SOP Path' in ROP will be empty."
                )
        else:
            self.log.debug(
                "No Selection. 'SOP Path' in ROP will be empty."
            )
        instance_node.setParms(parms)
        instance_node.parm("trange").set(1)
        # Explicitly set f1 and f2 to frame start.
        # Which forces the rop node to export one frame.
        instance_node.parmTuple('f').deleteAllKeyframes()
        fstart = int(hou.hscriptExpression("$FSTART"))
        instance_node.parmTuple('f').set((fstart, fstart, 1))
        # Lock any parameters in this list
        to_lock = ["prim_to_detail_pattern"]
        self.lock_parameters(instance_node, to_lock)

    def get_network_categories(self):
        """Network categories in which this creator is shown."""
        return [
            hou.ropNodeTypeCategory(),
            hou.sopNodeTypeCategory()
        ]

    def get_obj_output(self, obj_node):
        """Find output node with the smallest 'outputidx'."""
        outputs = obj_node.subnetOutputs()
        # if obj_node is empty
        if not outputs:
            return
        # if obj_node has one output child whether its
        # sop output node or a node with the render flag
        elif len(outputs) == 1:
            return outputs[0]
        # if there is more than one, then it has multiple output nodes;
        # return the one with the minimum 'outputidx'
        else:
            return min(outputs,
                       key=lambda node: node.evalParm('outputidx'))

    def get_instance_attr_defs(self):
        """Instance attribute definitions shown in the publisher UI."""
        return [
            BoolDef("farm",
                    label="Submitting to Farm",
                    default=False)
        ]

    def get_pre_create_attr_defs(self):
        """Pre-create attribute definitions (reuses instance attributes)."""
        attrs = super().get_pre_create_attr_defs()
        # Use same attributes as for instance attributes
        return attrs + self.get_instance_attr_defs()

View file

@ -11,7 +11,7 @@ class CollectDataforCache(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.11
families = ["ass", "pointcache",
"mantraifd", "redshiftproxy",
"vdbcache"]
"vdbcache", "model"]
hosts = ["houdini"]
targets = ["local", "remote"]
label = "Collect Data for Cache"
@ -43,10 +43,7 @@ class CollectDataforCache(pyblish.api.InstancePlugin):
cache_files = {"_": instance.data["files"]}
# Convert instance family to pointcache if it is bgeo or abc
# because ???
for family in instance.data["families"]:
if family == "bgeo" or "abc":
instance.data["productType"] = "pointcache"
break
self.log.debug(instance.data["families"])
instance.data.update({
"plugin": "Houdini",
"publish": True

View file

@ -10,7 +10,7 @@ class CollectChunkSize(pyblish.api.InstancePlugin,
order = pyblish.api.CollectorOrder + 0.05
families = ["ass", "pointcache",
"vdbcache", "mantraifd",
"redshiftproxy"]
"redshiftproxy", "model"]
hosts = ["houdini"]
targets = ["local", "remote"]
label = "Collect Chunk Size"

View file

@ -1,21 +1,24 @@
"""Collector for pointcache types.
"""Collector for different types.
This will add additional family to pointcache instance based on
This will add additional families to different instance based on
the creator_identifier parameter.
"""
import pyblish.api
class CollectPointcacheType(pyblish.api.InstancePlugin):
"""Collect data type for pointcache instance."""
"""Collect data type for different instances."""
order = pyblish.api.CollectorOrder
hosts = ["houdini"]
families = ["pointcache"]
label = "Collect type of pointcache"
families = ["pointcache", "model"]
label = "Collect instances types"
def process(self, instance):
if instance.data["creator_identifier"] == "io.openpype.creators.houdini.bgeo": # noqa: E501
instance.data["families"] += ["bgeo"]
elif instance.data["creator_identifier"] == "io.openpype.creators.houdini.pointcache": # noqa: E501
elif instance.data["creator_identifier"] in {
"io.openpype.creators.houdini.pointcache",
"io.openpype.creators.houdini.model"
}:
instance.data["families"] += ["abc"]

View file

@ -132,6 +132,6 @@ class CollectLocalRenderInstances(pyblish.api.InstancePlugin):
]
})
# Remove original render instance
# I can't remove it here as I still need it to trigger the render.
# context.remove(instance)
# Skip integrating original render instance.
# We are not removing it because it's used to trigger the render.
instance.data["integrate"] = False

View file

@ -15,7 +15,8 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin):
"usd",
"usdrender",
"redshiftproxy",
"staticMesh"
"staticMesh",
"model"
]
hosts = ["houdini"]

View file

@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-
"""Validator for checking that export is a single frame."""
import pyblish.api
from ayon_core.pipeline import (
PublishValidationError,
OptionalPyblishPluginMixin
)
from ayon_core.pipeline.publish import ValidateContentsOrder
from ayon_core.hosts.houdini.api.action import SelectInvalidAction
class ValidateSingleFrame(pyblish.api.InstancePlugin,
                          OptionalPyblishPluginMixin):
    """Validate Export is a Single Frame.

    It checks if rop node is exporting one frame.
    This is mainly for Model product type.
    """

    families = ["model"]
    hosts = ["houdini"]
    label = "Validate Single Frame"
    order = ValidateContentsOrder + 0.1
    actions = [SelectInvalidAction]

    def process(self, instance):
        """Raise a PublishValidationError when frame start != frame end."""
        invalid = self.get_invalid(instance)
        if invalid:
            nodes = [n.path() for n in invalid]
            raise PublishValidationError(
                "See log for details. "
                "Invalid nodes: {0}".format(nodes)
            )

    @classmethod
    def get_invalid(cls, instance):
        """Return invalid nodes, or None when there is no frame data.

        NOTE(review): assumes instance.data["instance_node"] is a hou.Node
        (both `.path()` calls depend on it) — confirm against the collector,
        which may store the node path string instead.
        """
        invalid = []
        frame_start = instance.data.get("frameStartHandle")
        frame_end = instance.data.get("frameEndHandle")
        # This happens if instance node has no 'trange' parameter.
        if frame_start is None or frame_end is None:
            cls.log.debug(
                "No frame data, skipping check.."
            )
            return
        if frame_start != frame_end:
            invalid.append(instance.data["instance_node"])
            cls.log.error(
                "Invalid frame range on '%s'."
                "You should use the same frame number for 'f1' "
                "and 'f2' parameters.",
                instance.data["instance_node"].path()
            )
        return invalid

View file

@ -16,9 +16,13 @@ class ValidateMeshIsStatic(pyblish.api.InstancePlugin,
"""Validate mesh is static.
It checks if output node is time dependent.
this avoids getting different output from ROP node when extracted
from a different frame than the first frame.
(Might be overly restrictive though)
"""
families = ["staticMesh"]
families = ["staticMesh",
"model"]
hosts = ["houdini"]
label = "Validate Mesh is Static"
order = ValidateContentsOrder + 0.1

View file

@ -7,7 +7,7 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin):
"""Validate Create Intermediate Directories is enabled on ROP node."""
order = pyblish.api.ValidatorOrder
families = ["pointcache", "camera", "vdbcache"]
families = ["pointcache", "camera", "vdbcache", "model"]
hosts = ["houdini"]
label = "Create Intermediate Directories Checked"

View file

@ -22,7 +22,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
"""
order = pyblish.api.ValidatorOrder
families = ["pointcache", "vdbcache"]
families = ["pointcache", "vdbcache", "model"]
hosts = ["houdini"]
label = "Validate Output Node (SOP)"
actions = [SelectROPAction, SelectInvalidAction]

View file

@ -1,71 +0,0 @@
import pyblish.api
import ayon_core.hosts.maya.api.action
from ayon_core.pipeline.publish import (
PublishValidationError,
ValidateContentsOrder,
OptionalPyblishPluginMixin
)
from maya import cmds
class ValidateAnimatedReferenceRig(pyblish.api.InstancePlugin,
                                   OptionalPyblishPluginMixin):
    """Validate all nodes in skeletonAnim_SET are referenced.

    Two checks are performed on the members of skeletonAnim_SET:
    every member must be a referenced node, and every member's node type
    must be one of `accepted_controllers`.
    """

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["animation.fbx"]
    label = "Animated Reference Rig"
    accepted_controllers = ["transform", "locator"]
    actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
    optional = False

    def process(self, instance):
        """Validate the instance's animated skeleton set members.

        Raises:
            PublishValidationError: When a member is not referenced or has
                an unaccepted node type.
        """
        if not self.is_active(instance.data):
            return
        animated_sets = instance.data.get("animated_skeleton", [])
        if not animated_sets:
            # Nothing collected from skeletonAnim_SET; nothing to validate.
            self.log.debug(
                "No nodes found in skeletonAnim_SET. "
                "Skipping validation of animated reference rig..."
            )
            return
        for animated_reference in animated_sets:
            is_referenced = cmds.referenceQuery(
                animated_reference, isNodeReferenced=True)
            if not bool(is_referenced):
                raise PublishValidationError(
                    "All the content in skeletonAnim_SET"
                    " should be referenced nodes"
                )
        invalid_controls = self.validate_controls(animated_sets)
        if invalid_controls:
            raise PublishValidationError(
                "All the content in skeletonAnim_SET"
                " should be transforms"
            )

    @classmethod
    def validate_controls(cls, set_members):
        """Check if the controller set contains only accepted node types.

        Fixed: the first parameter of this classmethod was named `self`;
        it is now correctly `cls`.

        Args:
            set_members: list of nodes of the skeleton_anim_set

        Returns:
            list: Nodes whose type is not in `accepted_controllers`.
        """
        # Validate control types
        invalid = []
        set_members = cmds.ls(set_members, long=True)
        for node in set_members:
            if cmds.nodeType(node) not in cls.accepted_controllers:
                invalid.append(node)
        return invalid

View file

@ -1024,6 +1024,18 @@ def script_name():
return nuke.root().knob("name").value()
def add_button_render_on_farm(node):
    """Add a "Render On Farm" PyScript button knob to the node.

    Clicking the button calls
    `ayon_core.hosts.nuke.api.utils.submit_render_on_farm` with the node.

    Args:
        node: Nuke node to add the knob to.
    """
    name = "renderOnFarm"
    label = "Render On Farm"
    value = (
        "from ayon_core.hosts.nuke.api.utils import submit_render_on_farm;"
        "submit_render_on_farm(nuke.thisNode())"
    )
    knob = nuke.PyScript_Knob(name, label, value)
    # Keep the button on the same line as the preceding knob.
    knob.clearFlag(nuke.STARTLINE)
    node.addKnob(knob)
def add_button_write_to_read(node):
name = "createReadNode"
label = "Read From Rendered"
@ -1146,6 +1158,17 @@ def create_write_node(
Return:
node (obj): group node with avalon data as Knobs
'''
# Ensure name does not contain any invalid characters.
special_chars = re.escape("!@#$%^&*()=[]{}|\\;',.<>/?~+-")
special_chars_regex = re.compile(f"[{special_chars}]")
found_special_characters = list(special_chars_regex.findall(name))
msg = (
f"Special characters found in name \"{name}\": "
f"{' '.join(found_special_characters)}"
)
assert not found_special_characters, msg
prenodes = prenodes or []
# filtering variables
@ -1270,6 +1293,10 @@ def create_write_node(
link.setFlag(0x1000)
GN.addKnob(link)
# Adding render farm submission button.
if data.get("render_on_farm", False):
add_button_render_on_farm(GN)
# adding write to read button
add_button_write_to_read(GN)

View file

@ -3,9 +3,15 @@ import re
import nuke
from ayon_core import resources
import pyblish.util
import pyblish.api
from qtpy import QtWidgets
from ayon_core import resources
from ayon_core.pipeline import registered_host
from ayon_core.tools.utils import show_message_dialog
from ayon_core.pipeline.create import CreateContext
def set_context_favorites(favorites=None):
""" Adding favorite folders to nuke's browser
@ -142,3 +148,77 @@ def is_headless():
bool: headless
"""
return QtWidgets.QApplication.instance() is None
def submit_render_on_farm(node):
    """Submit the node to the farm, always from Nuke's root context.

    Args:
        node: Nuke node to submit.
    """
    if nuke.root() != nuke.thisNode():
        # Not in root context: enter the root context first, then run
        # the submission there.
        with nuke.root():
            _submit_render_on_farm(node)
    else:
        # Already executing in root context.
        _submit_render_on_farm(node)
def _submit_render_on_farm(node):
    """Render on farm submission

    This function prepares the context for farm submission, validates it,
    extracts relevant data, copies the current workfile to a timestamped copy,
    and submits the job to the farm.

    Args:
        node (Node): The node for which the farm submission is being made.
    """
    host = registered_host()
    create_context = CreateContext(host)

    # Ensure CreateInstance is enabled.
    for instance in create_context.instances:
        if node.name() != instance.transient_data["node"].name():
            continue
        instance.data["active"] = True

    context = pyblish.api.Context()
    context.data["create_context"] = create_context
    # Used in pyblish plugin to determine which instance to publish.
    context.data["node_name"] = node.name()
    # Used in pyblish plugins to determine whether to run or not.
    context.data["render_on_farm"] = True

    # Since we need to bypass version validation and incrementing, we need to
    # remove the plugins from the list that are responsible for these tasks.
    plugins = pyblish.api.discover()
    blacklist = ["IncrementScriptVersion", "ValidateVersion"]
    plugins = [
        plugin
        for plugin in plugins
        if plugin.__name__ not in blacklist
    ]

    context = pyblish.util.publish(context, plugins=plugins)

    # Aggregate all failed plugin results into one error report.
    error_message = ""
    success = True
    for result in context.data["results"]:
        if result["success"]:
            continue
        success = False
        err = result["error"]
        error_message += "\n"
        error_message += err.formatted_traceback

    if not success:
        show_message_dialog(
            "Publish Errors", error_message, level="critical"
        )
        return

    show_message_dialog(
        "Submission Successful", "Submission to the farm was successful."
    )

View file

@ -65,12 +65,16 @@ class CreateWriteImage(napi.NukeWriteCreator):
)
def create_instance_node(self, product_name, instance_data):
settings = self.project_settings["nuke"]["create"]["CreateWriteImage"]
# add fpath_template
write_data = {
"creator": self.__class__.__name__,
"productName": product_name,
"fpath_template": self.temp_rendering_path_template
"fpath_template": self.temp_rendering_path_template,
"render_on_farm": (
"render_on_farm" in settings["instance_attributes"]
)
}
write_data.update(instance_data)

View file

@ -46,11 +46,17 @@ class CreateWritePrerender(napi.NukeWriteCreator):
return attr_defs
def create_instance_node(self, product_name, instance_data):
settings = self.project_settings["nuke"]["create"]
settings = settings["CreateWritePrerender"]
# add fpath_template
write_data = {
"creator": self.__class__.__name__,
"productName": product_name,
"fpath_template": self.temp_rendering_path_template
"fpath_template": self.temp_rendering_path_template,
"render_on_farm": (
"render_on_farm" in settings["instance_attributes"]
)
}
write_data.update(instance_data)

View file

@ -40,11 +40,16 @@ class CreateWriteRender(napi.NukeWriteCreator):
return attr_defs
def create_instance_node(self, product_name, instance_data):
settings = self.project_settings["nuke"]["create"]["CreateWriteRender"]
# add fpath_template
write_data = {
"creator": self.__class__.__name__,
"productName": product_name,
"fpath_template": self.temp_rendering_path_template
"fpath_template": self.temp_rendering_path_template,
"render_on_farm": (
"render_on_farm" in settings["instance_attributes"]
)
}
write_data.update(instance_data)

View file

@ -0,0 +1,56 @@
import pyblish.api
from ayon_core.pipeline.publish import (
AYONPyblishPluginMixin
)
class CollectRenderOnFarm(pyblish.api.ContextPlugin):
    """Setup instances for render on farm submission.

    Deactivates every instance except the one matching the node that
    triggered the submission, and marks that instance for farm publishing.
    Only runs when the context has "render_on_farm" set (see
    `submit_render_on_farm`).
    """

    # Needs to be after CollectFromCreateContext
    order = pyblish.api.CollectorOrder - 0.49
    label = "Collect Render On Farm"
    hosts = ["nuke"]

    def process(self, context):
        if not context.data.get("render_on_farm", False):
            return

        for instance in context:
            # Never publish the workfile in this mode.
            if instance.data["family"] == "workfile":
                instance.data["active"] = False
                continue

            # Filter out all other instances.
            node = instance.data["transientData"]["node"]
            if node.name() != instance.context.data["node_name"]:
                instance.data["active"] = False
                continue

            instance.data["families"].append("render_on_farm")

            # Enable for farm publishing.
            instance.data["farm"] = True

            # Skip workfile version incremental save.
            instance.context.data["increment_script_version"] = False
class SetupRenderOnFarm(pyblish.api.InstancePlugin, AYONPyblishPluginMixin):
    """Setup instance for render on farm submission.

    Strips the instance down to the "render_on_farm" family and forces
    the Deadline submission to use the current workfile rather than a
    published copy.
    """

    order = pyblish.api.CollectorOrder + 0.4999
    label = "Setup Render On Farm"
    hosts = ["nuke"]
    families = ["render_on_farm"]

    def process(self, instance):
        # Clear the families as we only want the main family, ei. no review
        # etc.
        instance.data["families"] = ["render_on_farm"]

        # Use the workfile instead of published.
        publish_attributes = instance.data["publish_attributes"]
        plugin_attributes = publish_attributes["NukeSubmitDeadline"]
        plugin_attributes["use_published_workfile"] = False

View file

@ -0,0 +1,36 @@
import os
from datetime import datetime
import shutil
import pyblish.api
from ayon_core.pipeline import registered_host
class ExtractRenderOnFarm(pyblish.api.InstancePlugin):
    """Copy the workfile to a timestamped copy.

    The copy is placed in a "farm_submissions" folder next to the current
    workfile and becomes the context's "currentFile", so the farm renders
    a frozen snapshot rather than the live workfile.
    """

    order = pyblish.api.ExtractorOrder + 0.499
    label = "Extract Render On Farm"
    hosts = ["nuke"]
    families = ["render_on_farm"]

    def process(self, instance):
        # Only act for render-on-farm submissions.
        if not instance.context.data.get("render_on_farm", False):
            return

        host = registered_host()
        timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
        base, ext = os.path.splitext(host.current_file())
        directory = os.path.join(os.path.dirname(base), "farm_submissions")
        # exist_ok avoids the race between an existence check and the
        # directory creation (the original checked os.path.exists first).
        os.makedirs(directory, exist_ok=True)
        filename = "{}_{}{}".format(
            os.path.basename(base), timestamp, ext
        )
        # Forward slashes keep the path portable for the farm workers.
        path = os.path.join(directory, filename).replace("\\", "/")
        instance.context.data["currentFile"] = path
        shutil.copy(host.current_file(), path)

View file

@ -13,6 +13,8 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin):
hosts = ['nuke']
def process(self, context):
if not context.data.get("increment_script_version", True):
return
assert all(result["success"] for result in context.data["results"]), (
"Publishing not successful so version is not increased.")

View file

@ -80,17 +80,21 @@ def get_engine_versions(env=None):
def get_editor_exe_path(engine_path: Path, engine_version: str) -> Path:
"""Get UE Editor executable path."""
ue_path = engine_path / "Engine/Binaries"
ue_name = "UnrealEditor"
# handle older versions of Unreal Engine
if engine_version.split(".")[0] == "4":
ue_name = "UE4Editor"
if platform.system().lower() == "windows":
if engine_version.split(".")[0] == "4":
ue_path /= "Win64/UE4Editor.exe"
elif engine_version.split(".")[0] == "5":
ue_path /= "Win64/UnrealEditor.exe"
ue_path /= f"Win64/{ue_name}.exe"
elif platform.system().lower() == "linux":
ue_path /= "Linux/UE4Editor"
ue_path /= f"Linux/{ue_name}"
elif platform.system().lower() == "darwin":
ue_path /= "Mac/UE4Editor"
ue_path /= f"Mac/{ue_name}"
return ue_path

View file

@ -1,5 +0,0 @@
from .clockify_module import ClockifyModule
__all__ = (
"ClockifyModule",
)

View file

@ -29,15 +29,11 @@ from ayon_core.pipeline.publish.lib import (
JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)
# TODO both 'requests_post' and 'requests_get' should not set 'verify' based
# on environment variable. This should be done in a more controlled way,
# e.g. each deadline url could have checkbox to enabled/disable
# ssl verification.
def requests_post(*args, **kwargs):
"""Wrap request post method.
Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
variable is found. This is useful when Deadline server is
Disabling SSL certificate validation if ``verify`` kwarg is set to False.
This is useful when Deadline server is
running with self-signed certificates and its certificate is not
added to trusted certificates on client machines.
@ -46,10 +42,6 @@ def requests_post(*args, **kwargs):
of defense SSL is providing, and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
True) else True # noqa
auth = kwargs.get("auth")
if auth:
kwargs["auth"] = tuple(auth) # explicit cast to tuple
@ -61,8 +53,8 @@ def requests_post(*args, **kwargs):
def requests_get(*args, **kwargs):
"""Wrap request get method.
Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
variable is found. This is useful when Deadline server is
Disabling SSL certificate validation if ``verify`` kwarg is set to False.
This is useful when Deadline server is
running with self-signed certificates and its certificate is not
added to trusted certificates on client machines.
@ -71,9 +63,6 @@ def requests_get(*args, **kwargs):
of defense SSL is providing, and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
True) else True # noqa
auth = kwargs.get("auth")
if auth:
kwargs["auth"] = tuple(auth)
@ -466,7 +455,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
self.aux_files = self.get_aux_files()
auth = instance.data["deadline"]["auth"]
job_id = self.process_submission(auth)
verify = instance.data["deadline"]["verify"]
job_id = self.process_submission(auth, verify)
self.log.info("Submitted job to Deadline: {}.".format(job_id))
# TODO: Find a way that's more generic and not render type specific
@ -479,10 +469,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
job_info=render_job_info,
plugin_info=render_plugin_info
)
render_job_id = self.submit(payload, auth)
render_job_id = self.submit(payload, auth, verify)
self.log.info("Render job id: %s", render_job_id)
def process_submission(self, auth=None):
def process_submission(self, auth=None, verify=True):
"""Process data for submission.
This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload
@ -493,7 +483,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
"""
payload = self.assemble_payload()
return self.submit(payload, auth)
return self.submit(payload, auth, verify)
@abstractmethod
def get_job_info(self):
@ -583,7 +573,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
"AuxFiles": aux_files or self.aux_files
}
def submit(self, payload, auth):
def submit(self, payload, auth, verify):
"""Submit payload to Deadline API end-point.
This takes payload in the form of JSON file and POST it to
@ -592,6 +582,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
Args:
payload (dict): dict to become json in deadline submission.
auth (tuple): (username, password)
verify (bool): verify SSL certificate if present
Returns:
str: resulting Deadline job id.
@ -601,8 +592,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
"""
url = "{}/api/jobs".format(self._deadline_url)
response = requests_post(url, json=payload,
auth=auth)
response = requests_post(
url, json=payload, auth=auth, verify=verify)
if not response.ok:
self.log.error("Submission failed!")
self.log.error(response.status_code)

View file

@ -76,6 +76,9 @@ class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):
)
instance.data["deadline"]["auth"] = None
instance.data["deadline"]["verify"] = (
not deadline_info["not_verify_ssl"])
if not deadline_info["require_authentication"]:
return
# TODO import 'get_addon_site_settings' when available

View file

@ -174,8 +174,9 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
instance.data["toBeRenderedOn"] = "deadline"
payload = self.assemble_payload()
return self.submit(payload,
auth=instance.data["deadline"]["auth"])
auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
return self.submit(payload, auth=auth, verify=verify)
def from_published_scene(self):
"""

View file

@ -193,9 +193,11 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
self.expected_files(instance, render_path)
self.log.debug("__ expectedFiles: `{}`".format(
instance.data["expectedFiles"]))
auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
response = requests_post(self.deadline_url, json=payload,
auth=instance.data["deadline"]["require_authentication"])
auth=auth,
verify=verify)
if not response.ok:
self.log.error(

View file

@ -242,7 +242,8 @@ class FusionSubmitDeadline(
# E.g. http://192.168.0.1:8082/api/jobs
url = "{}/api/jobs".format(deadline_url)
auth = instance.data["deadline"]["auth"]
response = requests_post(url, json=payload, auth=auth)
verify = instance.data["deadline"]["verify"]
response = requests_post(url, json=payload, auth=auth, verify=verify)
if not response.ok:
raise Exception(response.text)

View file

@ -181,19 +181,27 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
self.log.debug("Submitting 3dsMax render..")
project_settings = instance.context.data["project_settings"]
auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
if instance.data.get("multiCamera"):
self.log.debug("Submitting jobs for multiple cameras..")
payload = self._use_published_name_for_multiples(
payload_data, project_settings)
job_infos, plugin_infos = payload
for job_info, plugin_info in zip(job_infos, plugin_infos):
self.submit(self.assemble_payload(job_info, plugin_info),
instance.data["deadline"]["auth"])
self.submit(
self.assemble_payload(job_info, plugin_info),
auth=auth,
verify=verify
)
else:
payload = self._use_published_name(payload_data, project_settings)
job_info, plugin_info = payload
self.submit(self.assemble_payload(job_info, plugin_info),
instance.data["deadline"]["auth"])
self.submit(
self.assemble_payload(job_info, plugin_info),
auth=auth,
verify=verify
)
def _use_published_name(self, data, project_settings):
# Not all hosts can import these modules.

View file

@ -292,7 +292,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
return plugin_payload
def process_submission(self, auth=None):
def process_submission(self, auth=None, verify=True):
from maya import cmds
instance = self._instance
@ -332,8 +332,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
if "vrayscene" in instance.data["families"]:
self.log.debug("Submitting V-Ray scene render..")
vray_export_payload = self._get_vray_export_payload(payload_data)
export_job = self.submit(vray_export_payload,
instance.data["deadline"]["auth"])
auth=auth,
verify=verify)
payload = self._get_vray_render_payload(payload_data)
@ -353,7 +355,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
# Submit main render job
job_info, plugin_info = payload
self.submit(self.assemble_payload(job_info, plugin_info),
instance.data["deadline"]["auth"])
auth=auth,
verify=verify)
def _tile_render(self, payload):
"""Submit as tile render per frame with dependent assembly jobs."""
@ -557,13 +560,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
# Submit assembly jobs
assembly_job_ids = []
num_assemblies = len(assembly_payloads)
auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
for i, payload in enumerate(assembly_payloads):
self.log.debug(
"submitting assembly job {} of {}".format(i + 1,
num_assemblies)
)
assembly_job_id = self.submit(payload,
instance.data["deadline"]["auth"])
assembly_job_id = self.submit(
payload,
auth=auth,
verify=verify
)
assembly_job_ids.append(assembly_job_id)
instance.data["assemblySubmissionJobs"] = assembly_job_ids

View file

@ -424,8 +424,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
self.log.debug("__ expectedFiles: `{}`".format(
instance.data["expectedFiles"]))
auth = instance.data["deadline"]["auth"]
response = requests_post(self.deadline_url, json=payload, timeout=10,
auth=auth)
verify = instance.data["deadline"]["verify"]
response = requests_post(self.deadline_url,
json=payload,
timeout=10,
auth=auth,
verify=verify)
if not response.ok:
raise Exception(response.text)

View file

@ -210,8 +210,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
url = "{}/api/jobs".format(self.deadline_url)
auth = instance.data["deadline"]["auth"]
response = requests_post(url, json=payload, timeout=10,
auth=auth)
verify = instance.data["deadline"]["verify"]
response = requests_post(
url, json=payload, timeout=10, auth=auth, verify=verify)
if not response.ok:
raise Exception(response.text)

View file

@ -304,8 +304,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
url = "{}/api/jobs".format(self.deadline_url)
auth = instance.data["deadline"]["auth"]
response = requests_post(url, json=payload, timeout=10,
auth=auth)
verify = instance.data["deadline"]["verify"]
response = requests_post(
url, json=payload, timeout=10, auth=auth, verify=verify)
if not response.ok:
raise Exception(response.text)

View file

@ -920,7 +920,7 @@ def get_imageio_config_preset(
project_entity = None
if anatomy is None:
project_entity = ayon_api.get_project(project_name)
anatomy = Anatomy(project_name, project_entity)
anatomy = Anatomy(project_name, project_entity=project_entity)
if env is None:
env = dict(os.environ.items())

View file

@ -1,6 +1,11 @@
import pyblish.api
from ayon_core.lib import filter_profiles
from ayon_core.host import ILoadHost
from ayon_core.pipeline.load import any_outdated_containers
from ayon_core.pipeline import (
get_current_host_name,
registered_host,
PublishXmlValidationError,
OptionalPyblishPluginMixin
)
@ -18,17 +23,50 @@ class ShowInventory(pyblish.api.Action):
host_tools.show_scene_inventory()
class ValidateContainers(OptionalPyblishPluginMixin,
pyblish.api.ContextPlugin):
class ValidateOutdatedContainers(
OptionalPyblishPluginMixin,
pyblish.api.ContextPlugin
):
"""Containers are must be updated to latest version on publish."""
label = "Validate Outdated Containers"
order = pyblish.api.ValidatorOrder
hosts = ["maya", "houdini", "nuke", "harmony", "photoshop", "aftereffects"]
optional = True
actions = [ShowInventory]
@classmethod
def apply_settings(cls, settings):
# Disable plugin if host does not inherit from 'ILoadHost'
# - not a host that can load containers
host = registered_host()
if not isinstance(host, ILoadHost):
cls.enabled = False
return
# Disable if no profile is found for the current host
profiles = (
settings
["core"]
["publish"]
["ValidateOutdatedContainers"]
["plugin_state_profiles"]
)
profile = filter_profiles(
profiles, {"host_names": get_current_host_name()}
)
if not profile:
cls.enabled = False
return
# Apply settings from profile
for attr_name in {
"enabled",
"optional",
"active",
}:
setattr(cls, attr_name, profile[attr_name])
def process(self, context):
if not self.is_active(context.data):
return

View file

@ -1,8 +1,10 @@
import pyblish.api
from ayon_core.pipeline.publish import PublishValidationError
from ayon_core.pipeline.publish import (
PublishValidationError, OptionalPyblishPluginMixin
)
class ValidateVersion(pyblish.api.InstancePlugin):
class ValidateVersion(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
"""Validate instance version.
AYON does not allow overwriting previously published versions.
@ -18,6 +20,9 @@ class ValidateVersion(pyblish.api.InstancePlugin):
active = True
def process(self, instance):
if not self.is_active(instance.data):
return
version = instance.data.get("version")
latest_version = instance.data.get("latestVersion")

View file

@ -290,6 +290,34 @@ class ActionDelegate(QtWidgets.QStyledItemDelegate):
painter.drawPixmap(extender_x, extender_y, pix)
class ActionsProxyModel(QtCore.QSortFilterProxyModel):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
def lessThan(self, left, right):
# Sort by action order and then by label
left_value = left.data(ACTION_SORT_ROLE)
right_value = right.data(ACTION_SORT_ROLE)
# Values are same -> use super sorting
if left_value == right_value:
# Default behavior is using DisplayRole
return super().lessThan(left, right)
# Validate 'None' values
if right_value is None:
return True
if left_value is None:
return False
# Sort values and handle incompatible types
try:
return left_value < right_value
except TypeError:
return True
class ActionsWidget(QtWidgets.QWidget):
def __init__(self, controller, parent):
super(ActionsWidget, self).__init__(parent)
@ -316,10 +344,7 @@ class ActionsWidget(QtWidgets.QWidget):
model = ActionsQtModel(controller)
proxy_model = QtCore.QSortFilterProxyModel()
proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
proxy_model.setSortRole(ACTION_SORT_ROLE)
proxy_model = ActionsProxyModel()
proxy_model.setSourceModel(model)
view.setModel(proxy_model)
@ -359,7 +384,8 @@ class ActionsWidget(QtWidgets.QWidget):
def _on_model_refresh(self):
self._proxy_model.sort(0)
# Force repaint all items
self._view.update()
viewport = self._view.viewport()
viewport.update()
def _on_animation(self):
time_now = time.time()

View file

@ -723,7 +723,6 @@ class ProjectPushItemProcess:
dst_project_name = self._item.dst_project_name
dst_folder_id = self._item.dst_folder_id
dst_task_name = self._item.dst_task_name
dst_task_name_low = dst_task_name.lower()
new_folder_name = self._item.new_folder_name
if not dst_folder_id and not new_folder_name:
self._status.set_failed(
@ -765,7 +764,7 @@ class ProjectPushItemProcess:
dst_project_name, folder_ids=[folder_entity["id"]]
)
}
task_info = folder_tasks.get(dst_task_name_low)
task_info = folder_tasks.get(dst_task_name.lower())
if not task_info:
self._status.set_failed(
f"Could find task with name \"{dst_task_name}\""

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON core addon version."""
__version__ = "0.3.2-dev.1"
__version__ = "0.3.3-dev.1"

View file

@ -1,6 +1,6 @@
name = "core"
title = "Core"
version = "0.3.2-dev.1"
version = "0.3.3-dev.1"
client_dir = "ayon_core"

View file

@ -59,6 +59,33 @@ class CollectFramesFixDefModel(BaseSettingsModel):
)
class ValidateOutdatedContainersProfile(BaseSettingsModel):
_layout = "expanded"
# Filtering
host_names: list[str] = SettingsField(
default_factory=list,
title="Host names"
)
# Profile values
enabled: bool = SettingsField(True, title="Enabled")
optional: bool = SettingsField(True, title="Optional")
active: bool = SettingsField(True, title="Active")
class ValidateOutdatedContainersModel(BaseSettingsModel):
"""Validate if Publishing intent was selected.
It is possible to disable validation for specific publishing context
with profiles.
"""
_isGroup = True
plugin_state_profiles: list[ValidateOutdatedContainersProfile] = SettingsField(
default_factory=list,
title="Plugin enable state profiles",
)
class ValidateIntentProfile(BaseSettingsModel):
_layout = "expanded"
hosts: list[str] = SettingsField(default_factory=list, title="Host names")
@ -770,6 +797,10 @@ class PublishPuginsModel(BaseSettingsModel):
default_factory=ValidateBaseModel,
title="Validate Version"
)
ValidateOutdatedContainers: ValidateOutdatedContainersModel = SettingsField(
default_factory=ValidateOutdatedContainersModel,
title="Validate Containers"
)
ValidateIntent: ValidateIntentModel = SettingsField(
default_factory=ValidateIntentModel,
title="Validate Intent"
@ -855,6 +886,25 @@ DEFAULT_PUBLISH_VALUES = {
"optional": False,
"active": True
},
"ValidateOutdatedContainers": {
"plugin_state_profiles": [
{
# Default host names are based on original
# filter of ValidateContainer pyblish plugin
"host_names": [
"maya",
"houdini",
"nuke",
"harmony",
"photoshop",
"aftereffects"
],
"enabled": True,
"optional": True,
"active": True
}
]
},
"ValidateIntent": {
"enabled": False,
"profiles": []

View file

@ -1,3 +1,3 @@
name = "aftereffects"
title = "AfterEffects"
version = "0.1.3"
version = "0.1.4"

View file

@ -22,12 +22,6 @@ class ValidateSceneSettingsModel(BaseSettingsModel):
)
class ValidateContainersModel(BaseSettingsModel):
enabled: bool = SettingsField(True, title="Enabled")
optional: bool = SettingsField(True, title="Optional")
active: bool = SettingsField(True, title="Active")
class AfterEffectsPublishPlugins(BaseSettingsModel):
CollectReview: CollectReviewPluginModel = SettingsField(
default_factory=CollectReviewPluginModel,
@ -37,10 +31,6 @@ class AfterEffectsPublishPlugins(BaseSettingsModel):
default_factory=ValidateSceneSettingsModel,
title="Validate Scene Settings",
)
ValidateContainers: ValidateContainersModel = SettingsField(
default_factory=ValidateContainersModel,
title="Validate Containers",
)
AE_PUBLISH_PLUGINS_DEFAULTS = {
@ -58,9 +48,4 @@ AE_PUBLISH_PLUGINS_DEFAULTS = {
".*"
]
},
"ValidateContainers": {
"enabled": True,
"optional": True,
"active": True,
}
}

View file

@ -1,3 +1,3 @@
name = "blender"
title = "Blender"
version = "0.1.8"
version = "0.1.9"

View file

@ -151,6 +151,10 @@ class PublishPluginsModel(BaseSettingsModel):
default_factory=ExtractPlayblastModel,
title="Extract Playblast"
)
ExtractModelUSD: ValidatePluginModel = SettingsField(
default_factory=ValidatePluginModel,
title="Extract Model USD"
)
DEFAULT_BLENDER_PUBLISH_SETTINGS = {
@ -348,5 +352,10 @@ DEFAULT_BLENDER_PUBLISH_SETTINGS = {
},
indent=4
)
},
"ExtractModelUSD": {
"enabled": True,
"optional": True,
"active": True
}
}

View file

@ -0,0 +1,5 @@
from .addon import ClockifyAddon
__all__ = (
"ClockifyAddon",
)

View file

@ -2,12 +2,12 @@ import os
import threading
import time
from ayon_core.modules import AYONAddon, ITrayModule, IPluginPaths
from ayon_core.addon import AYONAddon, ITrayAddon, IPluginPaths
from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH
class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
class ClockifyAddon(AYONAddon, ITrayAddon, IPluginPaths):
name = "clockify"
def initialize(self, studio_settings):
@ -31,7 +31,7 @@ class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
# TimersManager attributes
# - set `timers_manager_connector` only in `tray_init`
self.timers_manager_connector = None
self._timers_manager_module = None
self._timer_manager_addon = None
@property
def clockify_api(self):
@ -87,7 +87,7 @@ class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
return {"actions": [actions_path]}
def get_ftrack_event_handler_paths(self):
"""Function for Ftrack module to add ftrack event handler paths."""
"""Function for ftrack addon to add ftrack event handler paths."""
return {
"user": [CLOCKIFY_FTRACK_USER_PATH],
"server": [CLOCKIFY_FTRACK_SERVER_PATH],
@ -206,19 +206,19 @@ class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths):
self.action_stop_timer.setVisible(self.bool_timer_run)
# --- TimersManager connection methods ---
def register_timers_manager(self, timer_manager_module):
def register_timers_manager(self, timer_manager_addon):
"""Store TimersManager for future use."""
self._timers_manager_module = timer_manager_module
self._timer_manager_addon = timer_manager_addon
def timer_started(self, data):
"""Tell TimersManager that timer started."""
if self._timers_manager_module is not None:
self._timers_manager_module.timer_started(self.id, data)
if self._timer_manager_addon is not None:
self._timer_manager_addon.timer_started(self.id, data)
def timer_stopped(self):
"""Tell TimersManager that timer stopped."""
if self._timers_manager_module is not None:
self._timers_manager_module.timer_stopped(self.id)
if self._timer_manager_addon is not None:
self._timer_manager_addon.timer_stopped(self.id)
def stop_timer(self):
"""Called from TimersManager to stop timer."""

View file

@ -1,15 +1,17 @@
import os
import json
import datetime
import requests
from ayon_core.lib.local_settings import AYONSecureRegistry
from ayon_core.lib import Logger
from .constants import (
CLOCKIFY_ENDPOINT,
ADMIN_PERMISSION_NAMES,
)
from ayon_core.lib.local_settings import AYONSecureRegistry
from ayon_core.lib import Logger
class ClockifyAPI:
log = Logger.get_logger(__name__)

View file

@ -1,7 +1,9 @@
import os
import json
from openpype_modules.ftrack.lib import ServerAction
from openpype_modules.clockify.clockify_api import ClockifyAPI
from ayon_clockify.clockify_api import ClockifyAPI
from ayon_ftrack.lib import ServerAction
class SyncClockifyServer(ServerAction):

View file

@ -1,25 +1,20 @@
import json
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.clockify.clockify_api import ClockifyAPI
from ayon_clockify.clockify_api import ClockifyAPI
from ayon_ftrack.lib import BaseAction, statics_icon
class SyncClockifyLocal(BaseAction):
'''Synchronise project names and task types.'''
"""Synchronise project names and task types."""
#: Action identifier.
identifier = 'clockify.sync.local'
#: Action label.
label = 'Sync To Clockify (local)'
#: Action description.
description = 'Synchronise data to Clockify workspace'
#: roles that are allowed to register this action
identifier = "clockify.sync.local"
label = "Sync To Clockify"
description = "Synchronise data to Clockify workspace"
role_list = ["Administrator", "project Manager"]
#: icon
icon = statics_icon("app_icons", "clockify-white.png")
def __init__(self, *args, **kwargs):
super(SyncClockifyLocal, self).__init__(*args, **kwargs)
#: CLockifyApi
self.clockify_api = ClockifyAPI()
def discover(self, session, entities, event):
@ -56,7 +51,7 @@ class SyncClockifyLocal(BaseAction):
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Ftrack to Clockify'
'description': 'Sync ftrack to Clockify'
})
})
session.commit()

View file

@ -1,7 +1,8 @@
import ayon_api
from ayon_clockify.clockify_api import ClockifyAPI
from ayon_core.pipeline import LauncherAction
from openpype_modules.clockify.clockify_api import ClockifyAPI
class ClockifyStart(LauncherAction):

View file

@ -1,6 +1,6 @@
import ayon_api
from openpype_modules.clockify.clockify_api import ClockifyAPI
from ayon_clockify.clockify_api import ClockifyAPI
from ayon_core.pipeline import LauncherAction

View file

@ -0,0 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'clockify' version."""
__version__ = "0.2.0"

View file

@ -1,3 +1,9 @@
name = "clockify"
title = "Clockify"
version = "0.1.1"
version = "0.2.0"
client_dir = "ayon_clockify"
ayon_required_addons = {
"core": ">0.3.2",
}
ayon_compatible_addons = {}

View file

@ -47,7 +47,7 @@ plugin_for = ["ayon_server"]
"""
CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*-
"""Package declaring AYON core addon version."""
"""Package declaring AYON addon '{}' version."""
__version__ = "{}"
'''
@ -183,6 +183,7 @@ def create_addon_zip(
def prepare_client_code(
addon_name: str,
addon_dir: Path,
addon_output_dir: Path,
addon_version: str
@ -211,7 +212,9 @@ def prepare_client_code(
version_path = subpath / "version.py"
if version_path.exists():
with open(version_path, "w") as stream:
stream.write(CLIENT_VERSION_CONTENT.format(addon_version))
stream.write(
CLIENT_VERSION_CONTENT.format(addon_name, addon_version)
)
zip_filepath = private_dir / "client.zip"
with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf:
@ -262,7 +265,9 @@ def create_addon_package(
server_dir, addon_output_dir / "server", dirs_exist_ok=True
)
prepare_client_code(addon_dir, addon_output_dir, addon_version)
prepare_client_code(
package.name, addon_dir, addon_output_dir, addon_version
)
if create_zip:
create_addon_zip(

View file

@ -1,3 +1,3 @@
name = "deadline"
title = "Deadline"
version = "0.1.11"
version = "0.1.12"

View file

@ -38,10 +38,9 @@ class ServerItemSubmodel(BaseSettingsModel):
name: str = SettingsField(title="Name")
value: str = SettingsField(title="Url")
require_authentication: bool = SettingsField(
False,
title="Require authentication")
ssl: bool = SettingsField(False,
title="SSL")
False, title="Require authentication")
not_verify_ssl: bool = SettingsField(
False, title="Don't verify SSL")
class DeadlineSettings(BaseSettingsModel):
@ -78,7 +77,7 @@ DEFAULT_VALUES = {
"name": "default",
"value": "http://127.0.0.1:8082",
"require_authentication": False,
"ssl": False
"not_verify_ssl": False
}
],
"deadline_server": "default",

View file

@ -1,3 +1,3 @@
name = "harmony"
title = "Harmony"
version = "0.1.2"
version = "0.1.3"

View file

@ -45,11 +45,6 @@ DEFAULT_HARMONY_SETTING = {
"optional": True,
"active": True
},
"ValidateContainers": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateSceneSettings": {
"enabled": True,
"optional": True,

View file

@ -18,14 +18,6 @@ class ValidateAudioPlugin(BaseSettingsModel):
active: bool = SettingsField(True, title="Active")
class ValidateContainersPlugin(BaseSettingsModel):
"""Check if loaded container is scene are latest versions."""
_isGroup = True
enabled: bool = True
optional: bool = SettingsField(False, title="Optional")
active: bool = SettingsField(True, title="Active")
class ValidateSceneSettingsPlugin(BaseSettingsModel):
"""Validate if FrameStart, FrameEnd and Resolution match shot data in DB.
Use regular expressions to limit validations only on particular asset
@ -63,11 +55,6 @@ class HarmonyPublishPlugins(BaseSettingsModel):
default_factory=ValidateAudioPlugin,
)
ValidateContainers: ValidateContainersPlugin = SettingsField(
title="Validate Containers",
default_factory=ValidateContainersPlugin,
)
ValidateSceneSettings: ValidateSceneSettingsPlugin = SettingsField(
title="Validate Scene Settings",
default_factory=ValidateSceneSettingsPlugin,

View file

@ -1,3 +1,3 @@
name = "houdini"
title = "Houdini"
version = "0.2.14"
version = "0.2.15"

View file

@ -57,6 +57,9 @@ class CreatePluginsModel(BaseSettingsModel):
CreateMantraROP: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create Mantra ROP")
CreateModel: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create Model")
CreatePointCache: CreatorModel = SettingsField(
default_factory=CreatorModel,
title="Create PointCache (Abc)")
@ -124,6 +127,10 @@ DEFAULT_HOUDINI_CREATE_SETTINGS = {
"enabled": True,
"default_variants": ["Main"]
},
"CreateModel": {
"enabled": True,
"default_variants": ["Main"]
},
"CreatePointCache": {
"enabled": True,
"default_variants": ["Main"]

View file

@ -77,10 +77,6 @@ class PublishPluginsModel(BaseSettingsModel):
default_factory=CollectLocalRenderInstancesModel,
title="Collect Local Render Instances."
)
ValidateContainers: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Latest Containers.",
section="Validators")
ValidateInstanceInContextHoudini: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Instance is in same Context.")
@ -119,11 +115,6 @@ DEFAULT_HOUDINI_PUBLISH_SETTINGS = {
]
}
},
"ValidateContainers": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateInstanceInContextHoudini": {
"enabled": True,
"optional": True,

View file

@ -1,3 +1,3 @@
name = "maya"
title = "Maya"
version = "0.1.18"
version = "0.1.20"

View file

@ -634,10 +634,6 @@ class PublishersModel(BaseSettingsModel):
title="Validate Instance In Context",
section="Validators"
)
ValidateContainers: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Containers"
)
ValidateFrameRange: ValidateFrameRangeModel = SettingsField(
default_factory=ValidateFrameRangeModel,
title="Validate Frame Range"
@ -917,10 +913,6 @@ class PublishersModel(BaseSettingsModel):
default_factory=BasicValidateModel,
title="Validate Rig Controllers",
)
ValidateAnimatedReferenceRig: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Animated Reference Rig",
)
ValidateAnimationContent: BasicValidateModel = SettingsField(
default_factory=BasicValidateModel,
title="Validate Animation Content",
@ -1063,11 +1055,6 @@ DEFAULT_PUBLISH_SETTINGS = {
"optional": True,
"active": True
},
"ValidateContainers": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateFrameRange": {
"enabled": True,
"optional": True,
@ -1447,11 +1434,6 @@ DEFAULT_PUBLISH_SETTINGS = {
"optional": True,
"active": True
},
"ValidateAnimatedReferenceRig": {
"enabled": True,
"optional": False,
"active": True
},
"ValidateAnimationContent": {
"enabled": True,
"optional": False,

View file

@ -1,3 +1,3 @@
name = "nuke"
title = "Nuke"
version = "0.1.12"
version = "0.1.14"

View file

@ -12,7 +12,11 @@ def instance_attributes_enum():
return [
{"value": "reviewable", "label": "Reviewable"},
{"value": "farm_rendering", "label": "Farm rendering"},
{"value": "use_range_limit", "label": "Use range limit"}
{"value": "use_range_limit", "label": "Use range limit"},
{
"value": "render_on_farm",
"label": "Render On Farm"
}
]

View file

@ -231,10 +231,6 @@ class PublishPluginsModel(BaseSettingsModel):
default_factory=OptionalPluginModel,
section="Validators"
)
ValidateContainers: OptionalPluginModel = SettingsField(
title="Validate Containers",
default_factory=OptionalPluginModel
)
ValidateKnobs: ValidateKnobsModel = SettingsField(
title="Validate Knobs",
default_factory=ValidateKnobsModel
@ -300,11 +296,6 @@ DEFAULT_PUBLISH_PLUGIN_SETTINGS = {
"optional": True,
"active": True
},
"ValidateContainers": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateKnobs": {
"enabled": False,
"knobs": "\n".join([

View file

@ -1,3 +1,3 @@
name = "photoshop"
title = "Photoshop"
version = "0.1.2"
version = "0.1.3"

View file

@ -83,14 +83,6 @@ class CollectVersionPlugin(BaseSettingsModel):
enabled: bool = SettingsField(True, title="Enabled")
class ValidateContainersPlugin(BaseSettingsModel):
"""Check that workfile contains latest version of loaded items""" # noqa
_isGroup = True
enabled: bool = True
optional: bool = SettingsField(False, title="Optional")
active: bool = SettingsField(True, title="Active")
class ValidateNamingPlugin(BaseSettingsModel):
"""Validate naming of products and layers""" # noqa
invalid_chars: str = SettingsField(
@ -154,11 +146,6 @@ class PhotoshopPublishPlugins(BaseSettingsModel):
default_factory=CollectVersionPlugin,
)
ValidateContainers: ValidateContainersPlugin = SettingsField(
title="Validate Containers",
default_factory=ValidateContainersPlugin,
)
ValidateNaming: ValidateNamingPlugin = SettingsField(
title="Validate naming of products and layers",
default_factory=ValidateNamingPlugin,
@ -187,11 +174,6 @@ DEFAULT_PUBLISH_SETTINGS = {
"CollectVersion": {
"enabled": False
},
"ValidateContainers": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateNaming": {
"invalid_chars": "[ \\\\/+\\*\\?\\(\\)\\[\\]\\{\\}:,;]",
"replace_char": "_"

View file

@ -22,7 +22,7 @@ from aiohttp_json_rpc.protocol import (
from aiohttp_json_rpc.exceptions import RpcError
from ayon_core.lib import emit_event
from ayon_core.hosts.tvpaint.tvpaint_plugin import get_plugin_files_path
from ayon_tvpaint.tvpaint_plugin import get_plugin_files_path
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)

View file

@ -10,7 +10,7 @@ from qtpy import QtWidgets, QtCore, QtGui
from ayon_core import style
from ayon_core.pipeline import install_host
from ayon_core.hosts.tvpaint.api import (
from ayon_tvpaint.api import (
TVPaintHost,
CommunicationWrapper,
)

View file

@ -7,8 +7,9 @@ import requests
import ayon_api
import pyblish.api
from ayon_tvpaint import TVPAINT_ROOT_DIR
from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
from ayon_core.hosts.tvpaint import TVPAINT_ROOT_DIR
from ayon_core.settings import get_current_project_settings
from ayon_core.lib import register_event_callback
from ayon_core.pipeline import (

View file

@ -12,7 +12,7 @@ from ayon_core.pipeline.create.creator_plugins import cache_and_get_instances
from .lib import get_layers_data
SHARED_DATA_KEY = "openpype.tvpaint.instances"
SHARED_DATA_KEY = "ayon.tvpaint.instances"
class TVPaintCreatorCommon:
@ -89,6 +89,8 @@ class TVPaintCreatorCommon:
class TVPaintCreator(Creator, TVPaintCreatorCommon):
settings_category = "tvpaint"
def collect_instances(self):
self._collect_create_instances()
@ -140,6 +142,8 @@ class TVPaintCreator(Creator, TVPaintCreatorCommon):
class TVPaintAutoCreator(AutoCreator, TVPaintCreatorCommon):
settings_category = "tvpaint"
def collect_instances(self):
self._collect_create_instances()
@ -152,6 +156,7 @@ class TVPaintAutoCreator(AutoCreator, TVPaintCreatorCommon):
class Loader(LoaderPlugin):
hosts = ["tvpaint"]
settings_category = "tvpaint"
@staticmethod
def get_members_from_container(container):

View file

@ -37,6 +37,6 @@ class TvpaintPrelaunchHook(PreLaunchHook):
self.launch_context.launch_args.extend(remainders)
def launch_script_path(self):
from ayon_core.hosts.tvpaint import get_launch_script_path
from ayon_tvpaint import get_launch_script_path
return get_launch_script_path()

View file

@ -4,8 +4,8 @@ from ayon_core.pipeline.create.creator_plugins import (
ProductConvertorPlugin,
cache_and_get_instances,
)
from ayon_core.hosts.tvpaint.api.plugin import SHARED_DATA_KEY
from ayon_core.hosts.tvpaint.api.lib import get_groups_data
from ayon_tvpaint.api.plugin import SHARED_DATA_KEY
from ayon_tvpaint.api.lib import get_groups_data
class TVPaintLegacyConverted(ProductConvertorPlugin):

View file

@ -52,11 +52,11 @@ from ayon_core.pipeline.create import (
CreatedInstance,
CreatorError,
)
from ayon_core.hosts.tvpaint.api.plugin import (
from ayon_tvpaint.api.plugin import (
TVPaintCreator,
TVPaintAutoCreator,
)
from ayon_core.hosts.tvpaint.api.lib import (
from ayon_tvpaint.api.lib import (
get_layers_data,
get_groups_data,
execute_george_through_file,

View file

@ -1,7 +1,7 @@
import ayon_api
from ayon_core.pipeline import CreatedInstance
from ayon_core.hosts.tvpaint.api.plugin import TVPaintAutoCreator
from ayon_tvpaint.api.plugin import TVPaintAutoCreator
class TVPaintReviewCreator(TVPaintAutoCreator):

View file

@ -1,7 +1,7 @@
import ayon_api
from ayon_core.pipeline import CreatedInstance
from ayon_core.hosts.tvpaint.api.plugin import TVPaintAutoCreator
from ayon_tvpaint.api.plugin import TVPaintAutoCreator
class TVPaintWorkfileCreator(TVPaintAutoCreator):

View file

@ -1,6 +1,6 @@
from ayon_core.lib.attribute_definitions import BoolDef
from ayon_core.hosts.tvpaint.api import plugin
from ayon_core.hosts.tvpaint.api.lib import execute_george_through_file
from ayon_tvpaint.api import plugin
from ayon_tvpaint.api.lib import execute_george_through_file
class ImportImage(plugin.Loader):

View file

@ -2,12 +2,12 @@ import collections
from ayon_core.lib.attribute_definitions import BoolDef
from ayon_core.pipeline import registered_host
from ayon_core.hosts.tvpaint.api import plugin
from ayon_core.hosts.tvpaint.api.lib import (
from ayon_tvpaint.api import plugin
from ayon_tvpaint.api.lib import (
get_layers_data,
execute_george_through_file,
)
from ayon_core.hosts.tvpaint.api.pipeline import (
from ayon_tvpaint.api.pipeline import (
write_workfile_metadata,
SECTION_NAME_CONTAINERS,
containerise,

View file

@ -1,7 +1,7 @@
import os
import tempfile
from ayon_core.hosts.tvpaint.api import plugin
from ayon_core.hosts.tvpaint.api.lib import (
from ayon_tvpaint.api import plugin
from ayon_tvpaint.api.lib import (
execute_george_through_file,
)

View file

@ -10,11 +10,11 @@ from ayon_core.pipeline.workfile import (
get_last_workfile_with_version,
)
from ayon_core.pipeline.template_data import get_template_data_with_names
from ayon_core.hosts.tvpaint.api import plugin
from ayon_core.hosts.tvpaint.api.lib import (
from ayon_tvpaint.api import plugin
from ayon_tvpaint.api.lib import (
execute_george_through_file,
)
from ayon_core.hosts.tvpaint.api.pipeline import (
from ayon_tvpaint.api.pipeline import (
get_current_workfile_context,
)
from ayon_core.pipeline.version_start import get_versioning_start

View file

@ -14,6 +14,8 @@ class CollectOutputFrameRange(pyblish.api.InstancePlugin):
hosts = ["tvpaint"]
families = ["review", "render"]
settings_category = "tvpaint"
def process(self, instance):
folder_entity = instance.data.get("folderEntity")
if not folder_entity:

View file

@ -9,6 +9,7 @@ class CollectRenderInstances(pyblish.api.InstancePlugin):
hosts = ["tvpaint"]
families = ["render", "review"]
settings_category = "tvpaint"
ignore_render_pass_transparency = False
def process(self, instance):

Some files were not shown because too many files have changed in this diff Show more