Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Commit 370a4cf206: [Automated] Merged develop into main
23 changed files with 512 additions and 217 deletions
@@ -327,6 +327,7 @@ class NukeWriteCreator(NukeCreator):
             "frames": "Use existing frames"
         }
         if ("farm_rendering" in self.instance_attributes):
+            rendering_targets["frames_farm"] = "Use existing frames - farm"
             rendering_targets["farm"] = "Farm rendering"
 
         return EnumDef(
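The hunk above only shows the new option in context; the following is a rough, hypothetical sketch (plain Python, no OpenPype imports) of the resulting render-target option set, with only the keys and labels that appear in this diff and a stand-in `instance_attributes` list:

def build_rendering_targets(instance_attributes):
    # keys and labels taken from the diff above; the helper itself is
    # illustrative only, not the creator's real method
    rendering_targets = {
        "frames": "Use existing frames",
    }
    if "farm_rendering" in instance_attributes:
        rendering_targets["frames_farm"] = "Use existing frames - farm"
        rendering_targets["farm"] = "Farm rendering"
    return rendering_targets


print(build_rendering_targets(["farm_rendering"]))
# {'frames': 'Use existing frames',
#  'frames_farm': 'Use existing frames - farm', 'farm': 'Farm rendering'}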
@@ -2,11 +2,13 @@ import nuke
 import pyblish.api
 
 
-class CollectInstanceData(pyblish.api.InstancePlugin):
-    """Collect all nodes with Avalon knob."""
+class CollectNukeInstanceData(pyblish.api.InstancePlugin):
+    """Collect Nuke instance data
+
+    """
 
     order = pyblish.api.CollectorOrder - 0.49
-    label = "Collect Instance Data"
+    label = "Collect Nuke Instance Data"
     hosts = ["nuke", "nukeassist"]
 
     # presets

@@ -40,5 +42,14 @@ class CollectInstanceData(pyblish.api.InstancePlugin):
             "pixelAspect": pixel_aspect
 
         })
+
+        # add creator attributes to instance
+        creator_attributes = instance.data["creator_attributes"]
+        instance.data.update(creator_attributes)
+
+        # add review family if review activated on instance
+        if instance.data.get("review"):
+            instance.data["families"].append("review")
+
         self.log.debug("Collected instance: {}".format(
             instance.data))
@@ -5,7 +5,7 @@ import nuke
 class CollectSlate(pyblish.api.InstancePlugin):
     """Check if SLATE node is in scene and connected to rendering tree"""
 
-    order = pyblish.api.CollectorOrder + 0.09
+    order = pyblish.api.CollectorOrder + 0.002
     label = "Collect Slate Node"
     hosts = ["nuke"]
     families = ["render"]

@@ -13,10 +13,14 @@ class CollectSlate(pyblish.api.InstancePlugin):
     def process(self, instance):
         node = instance.data["transientData"]["node"]
 
-        slate = next((n for n in nuke.allNodes()
-                      if "slate" in n.name().lower()
-                      if not n["disable"].getValue()),
-                     None)
+        slate = next(
+            (
+                n_ for n_ in nuke.allNodes()
+                if "slate" in n_.name().lower()
+                if not n_["disable"].getValue()
+            ),
+            None
+        )
 
         if slate:
             # check if slate node is connected to write node tree
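For readers unfamiliar with the double-`if` generator used in that lookup, here is a small stand-alone sketch of the same pattern; the fake node dicts replace `nuke.allNodes()`, which is only available inside Nuke:

nodes = [
    {"name": "Slate_old", "disable": True},
    {"name": "SLATE_main", "disable": False},
    {"name": "Write1", "disable": False},
]

# first node whose name contains "slate" and which is not disabled,
# or None when no such node exists
slate = next(
    (
        n_ for n_ in nodes
        if "slate" in n_["name"].lower()
        if not n_["disable"]
    ),
    None
)
print(slate)  # {'name': 'SLATE_main', 'disable': False}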
@@ -1,5 +1,4 @@
 import os
-from pprint import pformat
 import nuke
 import pyblish.api
 from openpype.hosts.nuke import api as napi
@@ -15,30 +14,16 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
     hosts = ["nuke", "nukeassist"]
     families = ["render", "prerender", "image"]
 
+    # cache
+    _write_nodes = {}
+    _frame_ranges = {}
+
     def process(self, instance):
-        self.log.debug(pformat(instance.data))
-        creator_attributes = instance.data["creator_attributes"]
-        instance.data.update(creator_attributes)
 
         group_node = instance.data["transientData"]["node"]
         render_target = instance.data["render_target"]
-        family = instance.data["family"]
-        families = instance.data["families"]
-
-        # add targeted family to families
-        instance.data["families"].append(
-            "{}.{}".format(family, render_target)
-        )
-        if instance.data.get("review"):
-            instance.data["families"].append("review")
-
-        child_nodes = napi.get_instance_group_node_childs(instance)
-        instance.data["transientData"]["childNodes"] = child_nodes
 
-        write_node = None
-        for x in child_nodes:
-            if x.Class() == "Write":
-                write_node = x
+        write_node = self._write_node_helper(instance)
 
         if write_node is None:
             self.log.warning(
@@ -48,113 +33,134 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
             )
             return
 
-        instance.data["writeNode"] = write_node
-        self.log.debug("checking instance: {}".format(instance))
+        # get colorspace and add to version data
+        colorspace = napi.get_colorspace_from_node(write_node)
 
-        # Determine defined file type
-        ext = write_node["file_type"].value()
+        if render_target == "frames":
+            self._set_existing_files_data(instance, colorspace)
 
-        # Get frame range
-        handle_start = instance.context.data["handleStart"]
-        handle_end = instance.context.data["handleEnd"]
-        first_frame = int(nuke.root()["first_frame"].getValue())
-        last_frame = int(nuke.root()["last_frame"].getValue())
-        frame_length = int(last_frame - first_frame + 1)
+        elif render_target == "frames_farm":
+            collected_frames = self._set_existing_files_data(
+                instance, colorspace)
 
-        if write_node["use_limit"].getValue():
-            first_frame = int(write_node["first"].getValue())
-            last_frame = int(write_node["last"].getValue())
+            self._set_expected_files(instance, collected_frames)
 
+            self._add_farm_instance_data(instance)
 
+        elif render_target == "farm":
+            self._add_farm_instance_data(instance)
 
+        # set additional instance data
+        self._set_additional_instance_data(instance, render_target, colorspace)
+
+    def _set_existing_files_data(self, instance, colorspace):
+        """Set existing files data to instance data.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            colorspace (str): colorspace
+
+        Returns:
+            list: collected frames
+        """
+        collected_frames = self._get_collected_frames(instance)
+
+        representation = self._get_existing_frames_representation(
+            instance, collected_frames
+        )
+
+        # inject colorspace data
+        self.set_representation_colorspace(
+            representation, instance.context,
+            colorspace=colorspace
+        )
+
+        instance.data["representations"].append(representation)
+
+        return collected_frames
+
+    def _set_expected_files(self, instance, collected_frames):
+        """Set expected files to instance data.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            collected_frames (list): collected frames
+        """
+        write_node = self._write_node_helper(instance)
 
         write_file_path = nuke.filename(write_node)
         output_dir = os.path.dirname(write_file_path)
 
-        # get colorspace and add to version data
-        colorspace = napi.get_colorspace_from_node(write_node)
+        instance.data["expectedFiles"] = [
+            os.path.join(output_dir, source_file)
+            for source_file in collected_frames
+        ]
 
-        self.log.debug('output dir: {}'.format(output_dir))
+    def _get_frame_range_data(self, instance):
+        """Get frame range data from instance.
 
-        if render_target == "frames":
-            representation = {
-                'name': ext,
-                'ext': ext,
-                "stagingDir": output_dir,
-                "tags": []
-            }
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
 
-            # get file path knob
-            node_file_knob = write_node["file"]
-            # list file paths based on input frames
-            expected_paths = list(sorted({
-                node_file_knob.evaluate(frame)
-                for frame in range(first_frame, last_frame + 1)
-            }))
+        Returns:
+            tuple: first_frame, last_frame
+        """
 
-            # convert only to base names
-            expected_filenames = [
-                os.path.basename(filepath)
-                for filepath in expected_paths
-            ]
+        instance_name = instance.data["name"]
 
-            # make sure files are existing at folder
-            collected_frames = [
-                filename
-                for filename in os.listdir(output_dir)
-                if filename in expected_filenames
-            ]
+        if self._frame_ranges.get(instance_name):
+            # return cashed write node
+            return self._frame_ranges[instance_name]
 
-            if collected_frames:
-                collected_frames_len = len(collected_frames)
-                frame_start_str = "%0{}d".format(
-                    len(str(last_frame))) % first_frame
-                representation['frameStart'] = frame_start_str
+        write_node = self._write_node_helper(instance)
 
-                # in case slate is expected and not yet rendered
-                self.log.debug("_ frame_length: {}".format(frame_length))
-                self.log.debug("_ collected_frames_len: {}".format(
-                    collected_frames_len))
+        # Get frame range from workfile
+        first_frame = int(nuke.root()["first_frame"].getValue())
+        last_frame = int(nuke.root()["last_frame"].getValue())
 
-                # this will only run if slate frame is not already
-                # rendered from previews publishes
-                if (
-                    "slate" in families
-                    and frame_length == collected_frames_len
-                    and family == "render"
-                ):
-                    frame_slate_str = (
-                        "{{:0{}d}}".format(len(str(last_frame)))
-                    ).format(first_frame - 1)
+        # Get frame range from write node if activated
+        if write_node["use_limit"].getValue():
+            first_frame = int(write_node["first"].getValue())
+            last_frame = int(write_node["last"].getValue())
 
-                    slate_frame = collected_frames[0].replace(
-                        frame_start_str, frame_slate_str)
-                    collected_frames.insert(0, slate_frame)
+        # add to cache
+        self._frame_ranges[instance_name] = (first_frame, last_frame)
 
-                if collected_frames_len == 1:
-                    representation['files'] = collected_frames.pop()
-                else:
-                    representation['files'] = collected_frames
+        return first_frame, last_frame
 
-            # inject colorspace data
-            self.set_representation_colorspace(
-                representation, instance.context,
-                colorspace=colorspace
-            )
+    def _set_additional_instance_data(
+        self, instance, render_target, colorspace
+    ):
+        """Set additional instance data.
 
-            instance.data["representations"].append(representation)
-            self.log.info("Publishing rendered frames ...")
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            render_target (str): render target
+            colorspace (str): colorspace
+        """
+        family = instance.data["family"]
 
-        elif render_target == "farm":
-            farm_keys = ["farm_chunk", "farm_priority", "farm_concurrency"]
-            for key in farm_keys:
-                # Skip if key is not in creator attributes
-                if key not in creator_attributes:
-                    continue
-                # Add farm attributes to instance
-                instance.data[key] = creator_attributes[key]
+        # add targeted family to families
+        instance.data["families"].append(
+            "{}.{}".format(family, render_target)
+        )
+        self.log.debug("Appending render target to families: {}.{}".format(
+            family, render_target)
+        )
 
-            # Farm rendering
-            instance.data["transfer"] = False
-            instance.data["farm"] = True
-            self.log.info("Farm rendering ON ...")
+        write_node = self._write_node_helper(instance)
 
+        # Determine defined file type
+        ext = write_node["file_type"].value()
+
+        # get frame range data
+        handle_start = instance.context.data["handleStart"]
+        handle_end = instance.context.data["handleEnd"]
+        first_frame, last_frame = self._get_frame_range_data(instance)
+
+        # get output paths
+        write_file_path = nuke.filename(write_node)
+        output_dir = os.path.dirname(write_file_path)
 
         # TODO: remove this when we have proper colorspace support
         version_data = {
@@ -188,10 +194,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
             "frameEndHandle": last_frame,
         })
 
-        # make sure rendered sequence on farm will
-        # be used for extract review
-        if not instance.data.get("review"):
-            instance.data["useSequenceForReview"] = False
 
         # TODO temporarily set stagingDir as persistent for backward
         # compatibility. This is mainly focused on `renders`folders which
@@ -199,4 +201,201 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
         # this logic should be removed and replaced with custom staging dir
         instance.data["stagingDir_persistent"] = True
 
-        self.log.debug("instance.data: {}".format(pformat(instance.data)))
+    def _write_node_helper(self, instance):
+        """Helper function to get write node from instance.
+
+        Also sets instance transient data with child nodes.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+
+        Returns:
+            nuke.Node: write node
+        """
+        instance_name = instance.data["name"]
+
+        if self._write_nodes.get(instance_name):
+            # return cashed write node
+            return self._write_nodes[instance_name]
+
+        # get all child nodes from group node
+        child_nodes = napi.get_instance_group_node_childs(instance)
+
+        # set child nodes to instance transient data
+        instance.data["transientData"]["childNodes"] = child_nodes
+
+        write_node = None
+        for node_ in child_nodes:
+            if node_.Class() == "Write":
+                write_node = node_
+
+        if write_node:
+            # for slate frame extraction
+            instance.data["transientData"]["writeNode"] = write_node
+            # add to cache
+            self._write_nodes[instance_name] = write_node
+
+            return self._write_nodes[instance_name]
+
+    def _get_existing_frames_representation(
+        self,
+        instance,
+        collected_frames
+    ):
+        """Get existing frames representation.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            collected_frames (list): collected frames
+
+        Returns:
+            dict: representation
+        """
+
+        first_frame, last_frame = self._get_frame_range_data(instance)
+
+        write_node = self._write_node_helper(instance)
+
+        write_file_path = nuke.filename(write_node)
+        output_dir = os.path.dirname(write_file_path)
+
+        # Determine defined file type
+        ext = write_node["file_type"].value()
+
+        representation = {
+            "name": ext,
+            "ext": ext,
+            "stagingDir": output_dir,
+            "tags": []
+        }
+
+        frame_start_str = self._get_frame_start_str(first_frame, last_frame)
+
+        representation['frameStart'] = frame_start_str
+
+        # set slate frame
+        collected_frames = self._add_slate_frame_to_collected_frames(
+            instance,
+            collected_frames,
+            first_frame,
+            last_frame
+        )
+
+        if len(collected_frames) == 1:
+            representation['files'] = collected_frames.pop()
+        else:
+            representation['files'] = collected_frames
+
+        return representation
+
+    def _get_frame_start_str(self, first_frame, last_frame):
+        """Get frame start string.
+
+        Args:
+            first_frame (int): first frame
+            last_frame (int): last frame
+
+        Returns:
+            str: frame start string
+        """
+        # convert first frame to string with padding
+        return (
+            "{{:0{}d}}".format(len(str(last_frame)))
+        ).format(first_frame)
+
+    def _add_slate_frame_to_collected_frames(
+        self,
+        instance,
+        collected_frames,
+        first_frame,
+        last_frame
+    ):
+        """Add slate frame to collected frames.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            collected_frames (list): collected frames
+            first_frame (int): first frame
+            last_frame (int): last frame
+
+        Returns:
+            list: collected frames
+        """
+        frame_start_str = self._get_frame_start_str(first_frame, last_frame)
+        frame_length = int(last_frame - first_frame + 1)
+
+        # this will only run if slate frame is not already
+        # rendered from previews publishes
+        if (
+            "slate" in instance.data["families"]
+            and frame_length == len(collected_frames)
+        ):
+            frame_slate_str = self._get_frame_start_str(
+                first_frame - 1,
+                last_frame
+            )
+
+            slate_frame = collected_frames[0].replace(
+                frame_start_str, frame_slate_str)
+            collected_frames.insert(0, slate_frame)
+
+        return collected_frames
+
+    def _add_farm_instance_data(self, instance):
+        """Add farm publishing related instance data.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+        """
+
+        # make sure rendered sequence on farm will
+        # be used for extract review
+        if not instance.data.get("review"):
+            instance.data["useSequenceForReview"] = False
+
+        # Farm rendering
+        instance.data.update({
+            "transfer": False,
+            "farm": True  # to skip integrate
+        })
+        self.log.info("Farm rendering ON ...")
+
+    def _get_collected_frames(self, instance):
+        """Get collected frames.
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+
+        Returns:
+            list: collected frames
+        """
+
+        first_frame, last_frame = self._get_frame_range_data(instance)
+
+        write_node = self._write_node_helper(instance)
+
+        write_file_path = nuke.filename(write_node)
+        output_dir = os.path.dirname(write_file_path)
+
+        # get file path knob
+        node_file_knob = write_node["file"]
+        # list file paths based on input frames
+        expected_paths = list(sorted({
+            node_file_knob.evaluate(frame)
+            for frame in range(first_frame, last_frame + 1)
+        }))
+
+        # convert only to base names
+        expected_filenames = {
+            os.path.basename(filepath)
+            for filepath in expected_paths
+        }
+
+        # make sure files are existing at folder
+        collected_frames = [
+            filename
+            for filename in os.listdir(output_dir)
+            if filename in expected_filenames
+        ]
+
+        return collected_frames
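A worked example of the padding helpers added above, in pure Python with a hypothetical file name pattern: the frame-start string is zero-padded to the width of the last frame number, and the slate frame is named one frame before the first frame.

def get_frame_start_str(first_frame, last_frame):
    # same formatting trick as _get_frame_start_str above
    return ("{{:0{}d}}".format(len(str(last_frame)))).format(first_frame)


first_frame, last_frame = 997, 1100
frame_start_str = get_frame_start_str(first_frame, last_frame)      # "0997"
frame_slate_str = get_frame_start_str(first_frame - 1, last_frame)  # "0996"

collected_frames = ["shot010_{:04d}.exr".format(f) for f in range(997, 1101)]
slate_frame = collected_frames[0].replace(frame_start_str, frame_slate_str)
print(slate_frame)  # shot010_0996.exr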
@@ -11,9 +11,9 @@ from openpype.hosts.nuke.api.lib import maintained_selection
 
 
 class ExtractCamera(publish.Extractor):
-    """ 3D camera exctractor
+    """ 3D camera extractor
     """
-    label = 'Exctract Camera'
+    label = 'Extract Camera'
     order = pyblish.api.ExtractorOrder
     families = ["camera"]
     hosts = ["nuke"]
@@ -11,9 +11,9 @@ from openpype.hosts.nuke.api.lib import (
 
 
 class ExtractModel(publish.Extractor):
-    """ 3D model exctractor
+    """ 3D model extractor
     """
-    label = 'Exctract Model'
+    label = 'Extract Model'
     order = pyblish.api.ExtractorOrder
     families = ["model"]
     hosts = ["nuke"]
@@ -249,7 +249,7 @@ class ExtractSlateFrame(publish.Extractor):
 
         # Add file to representation files
         # - get write node
-        write_node = instance.data["writeNode"]
+        write_node = instance.data["transientData"]["writeNode"]
         # - evaluate filepaths for first frame and slate frame
         first_filename = os.path.basename(
             write_node["file"].evaluate(first_frame))
@@ -54,6 +54,7 @@ class ExtractThumbnail(publish.Extractor):
     def render_thumbnail(self, instance, output_name=None, **kwargs):
         first_frame = instance.data["frameStartHandle"]
         last_frame = instance.data["frameEndHandle"]
+        colorspace = instance.data["colorspace"]
 
         # find frame range and define middle thumb frame
         mid_frame = int((last_frame - first_frame) / 2)

@@ -112,8 +113,8 @@ class ExtractThumbnail(publish.Extractor):
         if self.use_rendered and os.path.isfile(path_render):
             # check if file exist otherwise connect to write node
             rnode = nuke.createNode("Read")
 
             rnode["file"].setValue(path_render)
+            rnode["colorspace"].setValue(colorspace)
 
             # turn it raw if none of baking is ON
             if all([
@@ -90,7 +90,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
         if not instance.data.get("farm"):
             self.log.debug("Skipping local instance.")
             return
 
         instance.data["attributeValues"] = self.get_attr_values_from_data(
             instance.data)
-
@@ -123,13 +122,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
         render_path = instance.data['path']
         script_path = context.data["currentFile"]
 
-        for item in context:
-            if "workfile" in item.data["families"]:
-                msg = "Workfile (scene) must be published along"
-                assert item.data["publish"] is True, msg
-
-                template_data = item.data.get("anatomyData")
-                rep = item.data.get("representations")[0].get("name")
+        for item_ in context:
+            if "workfile" in item_.data["family"]:
+                template_data = item_.data.get("anatomyData")
+                rep = item_.data.get("representations")[0].get("name")
                 template_data["representation"] = rep
                 template_data["ext"] = rep
                 template_data["comment"] = None
@@ -141,19 +137,24 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
             "Using published scene for render {}".format(script_path)
         )
 
-        response = self.payload_submit(
-            instance,
-            script_path,
-            render_path,
-            node.name(),
-            submit_frame_start,
-            submit_frame_end
-        )
-        # Store output dir for unified publisher (filesequence)
-        instance.data["deadlineSubmissionJob"] = response.json()
-        instance.data["outputDir"] = os.path.dirname(
-            render_path).replace("\\", "/")
-        instance.data["publishJobState"] = "Suspended"
+        # only add main rendering job if target is not frames_farm
+        r_job_response_json = None
+        if instance.data["render_target"] != "frames_farm":
+            r_job_response = self.payload_submit(
+                instance,
+                script_path,
+                render_path,
+                node.name(),
+                submit_frame_start,
+                submit_frame_end
+            )
+            r_job_response_json = r_job_response.json()
+            instance.data["deadlineSubmissionJob"] = r_job_response_json
+
+        # Store output dir for unified publisher (filesequence)
+        instance.data["outputDir"] = os.path.dirname(
+            render_path).replace("\\", "/")
+        instance.data["publishJobState"] = "Suspended"
 
         if instance.data.get("bakingNukeScripts"):
             for baking_script in instance.data["bakingNukeScripts"]:
@@ -161,18 +162,20 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                 script_path = baking_script["bakeScriptPath"]
                 exe_node_name = baking_script["bakeWriteNodeName"]
 
-                resp = self.payload_submit(
+                b_job_response = self.payload_submit(
                     instance,
                     script_path,
                     render_path,
                     exe_node_name,
                     submit_frame_start,
                     submit_frame_end,
-                    response.json()
+                    r_job_response_json,
+                    baking_submission=True
                 )
 
                 # Store output dir for unified publisher (filesequence)
-                instance.data["deadlineSubmissionJob"] = resp.json()
+                instance.data["deadlineSubmissionJob"] = b_job_response.json()
 
                 instance.data["publishJobState"] = "Suspended"
 
                 # add to list of job Id
@@ -180,7 +183,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                     instance.data["bakingSubmissionJobs"] = []
 
                 instance.data["bakingSubmissionJobs"].append(
-                    resp.json()["_id"])
+                    b_job_response.json()["_id"])
 
         # redefinition of families
         if "render" in instance.data["family"]:
@@ -199,15 +202,35 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
         exe_node_name,
         start_frame,
         end_frame,
-        response_data=None
+        response_data=None,
+        baking_submission=False,
     ):
+        """Submit payload to Deadline
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            script_path (str): path to nuke script
+            render_path (str): path to rendered images
+            exe_node_name (str): name of the node to render
+            start_frame (int): start frame
+            end_frame (int): end frame
+            response_data Optional[dict]: response data from
+                previous submission
+            baking_submission Optional[bool]: if it's baking submission
+
+        Returns:
+            requests.Response
+        """
         render_dir = os.path.normpath(os.path.dirname(render_path))
-        batch_name = os.path.basename(script_path)
-        jobname = "%s - %s" % (batch_name, instance.name)
+
+        # batch name
+        src_filepath = instance.context.data["currentFile"]
+        batch_name = os.path.basename(src_filepath)
+        job_name = os.path.basename(render_path)
 
         if is_in_tests():
             batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
 
         output_filename_0 = self.preview_fname(render_path)
 
         if not response_data:
@@ -228,11 +251,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
             # Top-level group name
             "BatchName": batch_name,
 
-            # Asset dependency to wait for at least the scene file to sync.
-            # "AssetDependency0": script_path,
-
             # Job name, as seen in Monitor
-            "Name": jobname,
+            "Name": job_name,
 
             # Arbitrary username, for visualisation in Monitor
             "UserName": self._deadline_user,
@@ -294,12 +314,17 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
             "AuxFiles": []
         }
 
-        if response_data.get("_id"):
+        # TODO: rewrite for baking with sequences
+        if baking_submission:
+            payload["JobInfo"].update({
+                "JobType": "Normal",
+                "ChunkSize": 99999999
+            })
+
+        if response_data.get("_id"):
             payload["JobInfo"].update({
                 "BatchName": response_data["Props"]["Batch"],
                 "JobDependency0": response_data["_id"],
                 "ChunkSize": 99999999
             })
 
         # Include critical environment variables with submission
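As a hedged illustration of how a baking submission chains onto the main render job above, the sketch below builds only the JobInfo fields visible in this diff; the helper function and the example values are illustrative, not the plugin's real method or data.

def build_job_info(job_name, batch_name, response_data=None,
                   baking_submission=False):
    job_info = {
        "BatchName": batch_name,
        "Name": job_name,
    }
    if baking_submission:
        # baking jobs run as a single task over the whole sequence
        job_info.update({
            "JobType": "Normal",
            "ChunkSize": 99999999,
        })
    if response_data and response_data.get("_id"):
        # hang this job off the previously submitted render job
        job_info.update({
            "BatchName": response_data["Props"]["Batch"],
            "JobDependency0": response_data["_id"],
        })
    return job_info


render_job = {"_id": "abc123", "Props": {"Batch": "shot010_comp.nk"}}
print(build_job_info("shot010_baking", "shot010_comp.nk",
                     response_data=render_job, baking_submission=True))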
@@ -98,7 +98,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
     hosts = ["fusion", "max", "maya", "nuke", "houdini",
              "celaction", "aftereffects", "harmony"]
 
-    families = ["render.farm", "prerender.farm",
+    families = ["render.farm", "render.frames_farm",
+                "prerender.farm", "prerender.frames_farm",
                 "renderlayer", "imagesequence",
                 "vrayscene", "maxrender",
                 "arnold_rop", "mantra_rop",

@@ -121,7 +122,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         "FTRACK_SERVER",
         "AVALON_APP_NAME",
         "OPENPYPE_USERNAME",
-        "OPENPYPE_SG_USER"
+        "OPENPYPE_SG_USER",
+        "KITSU_LOGIN",
+        "KITSU_PWD"
     ]
 
     # custom deadline attributes

@@ -299,7 +302,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
                 payload["JobInfo"]["JobDependency{}".format(
                     job_index)] = assembly_id  # noqa: E501
                 job_index += 1
-        else:
+        elif job.get("_id"):
             payload["JobInfo"]["JobDependency0"] = job["_id"]
 
         for index, (key_, value_) in enumerate(environment.items()):

@@ -475,6 +478,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             "FTRACK_SERVER": os.environ.get("FTRACK_SERVER"),
         }
 
+        deadline_publish_job_id = None
         if submission_type == "deadline":
             # get default deadline webservice url from deadline module
             self.deadline_url = instance.context.data["defaultDeadline"]
@@ -10,7 +10,7 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
     label = "Validate Deadline Web Service"
     order = pyblish.api.ValidatorOrder
     hosts = ["maya", "nuke"]
-    families = ["renderlayer"]
+    families = ["renderlayer", "render"]
 
     def process(self, instance):
         # get default deadline webservice url from deadline module
@@ -19,6 +19,7 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
     order = pyblish.api.ValidatorOrder
     families = ["rendering",
                 "render.farm",
+                "render.frames_farm",
                 "renderFarm",
                 "renderlayer",
                 "maxrender"]
@@ -20,8 +20,19 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
     allow_user_override = True
 
     def process(self, instance):
-        self.instance = instance
-        frame_list = self._get_frame_list(instance.data["render_job_id"])
+        """Process all the nodes in the instance"""
+
+        # get dependency jobs ids for retrieving frame list
+        dependent_job_ids = self._get_dependent_job_ids(instance)
+
+        if not dependent_job_ids:
+            self.log.warning("No dependent jobs found for instance: {}"
+                             "".format(instance))
+            return
+
+        # get list of frames from dependent jobs
+        frame_list = self._get_dependent_jobs_frames(
+            instance, dependent_job_ids)
 
         for repre in instance.data["representations"]:
             expected_files = self._get_expected_files(repre)

@@ -78,26 +89,45 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
                 )
             )
 
-    def _get_frame_list(self, original_job_id):
+    def _get_dependent_job_ids(self, instance):
+        """Returns list of dependent job ids from instance metadata.json
+
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+
+        Returns:
+            (list): list of dependent job ids
+
+        """
+        dependent_job_ids = []
+
+        # job_id collected from metadata.json
+        original_job_id = instance.data["render_job_id"]
+
+        dependent_job_ids_env = os.environ.get("RENDER_JOB_IDS")
+        if dependent_job_ids_env:
+            dependent_job_ids = dependent_job_ids_env.split(',')
+        elif original_job_id:
+            dependent_job_ids = [original_job_id]
+
+        return dependent_job_ids
+
+    def _get_dependent_jobs_frames(self, instance, dependent_job_ids):
         """Returns list of frame ranges from all render job.
 
         Render job might be re-submitted so job_id in metadata.json could be
         invalid. GlobalJobPreload injects current job id to RENDER_JOB_IDS.
 
         Args:
-            original_job_id (str)
+            instance (pyblish.api.Instance): pyblish instance
+            dependent_job_ids (list): list of dependent job ids
         Returns:
            (list)
        """
         all_frame_lists = []
-        render_job_ids = os.environ.get("RENDER_JOB_IDS")
-        if render_job_ids:
-            render_job_ids = render_job_ids.split(',')
-        else:  # fallback
-            render_job_ids = [original_job_id]
 
-        for job_id in render_job_ids:
-            job_info = self._get_job_info(job_id)
+        for job_id in dependent_job_ids:
+            job_info = self._get_job_info(instance, job_id)
             frame_list = job_info["Props"].get("Frames")
             if frame_list:
                 all_frame_lists.extend(frame_list.split(','))

@@ -152,18 +182,25 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
 
         return file_name_template, frame_placeholder
 
-    def _get_job_info(self, job_id):
+    def _get_job_info(self, instance, job_id):
         """Calls DL for actual job info for 'job_id'
 
         Might be different than job info saved in metadata.json if user
         manually changes job pre/during rendering.
 
+        Args:
+            instance (pyblish.api.Instance): pyblish instance
+            job_id (str): Deadline job id
+
+        Returns:
+            (dict): Job info from Deadline
+
         """
         # get default deadline webservice url from deadline module
-        deadline_url = self.instance.context.data["defaultDeadline"]
+        deadline_url = instance.context.data["defaultDeadline"]
         # if custom one is set in instance, use that
-        if self.instance.data.get("deadlineUrl"):
-            deadline_url = self.instance.data.get("deadlineUrl")
+        if instance.data.get("deadlineUrl"):
+            deadline_url = instance.data.get("deadlineUrl")
         assert deadline_url, "Requires Deadline Webservice URL"
 
         url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
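A minimal sketch of the dependent-job-id resolution added above, in pure Python and mirroring _get_dependent_job_ids: ids injected by GlobalJobPreload through RENDER_JOB_IDS win, and the job id stored in metadata.json is only a fallback.

import os


def get_dependent_job_ids(metadata_job_id):
    dependent_job_ids = []
    dependent_job_ids_env = os.environ.get("RENDER_JOB_IDS")
    if dependent_job_ids_env:
        dependent_job_ids = dependent_job_ids_env.split(",")
    elif metadata_job_id:
        dependent_job_ids = [metadata_job_id]
    return dependent_job_ids


os.environ["RENDER_JOB_IDS"] = "job_a,job_b"
print(get_dependent_job_ids("stale_job_id"))  # ['job_a', 'job_b']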
@@ -38,6 +38,7 @@ class AyonDeadlinePlugin(DeadlinePlugin):
     for publish process.
     """
     def __init__(self):
+        super().__init__()
         self.InitializeProcessCallback += self.InitializeProcess
         self.RenderExecutableCallback += self.RenderExecutable
         self.RenderArgumentCallback += self.RenderArgument
@@ -8,13 +8,14 @@ from Deadline.Scripting import *
 def GetDeadlinePlugin():
     return HarmonyOpenPypePlugin()
 
 
 def CleanupDeadlinePlugin( deadlinePlugin ):
     deadlinePlugin.Cleanup()
 
 
 class HarmonyOpenPypePlugin( DeadlinePlugin ):
 
     def __init__( self ):
+        super().__init__()
         self.InitializeProcessCallback += self.InitializeProcess
         self.RenderExecutableCallback += self.RenderExecutable
         self.RenderArgumentCallback += self.RenderArgument

@@ -24,11 +25,11 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
         print("Cleanup")
         for stdoutHandler in self.StdoutHandlers:
             del stdoutHandler.HandleCallback
 
         del self.InitializeProcessCallback
         del self.RenderExecutableCallback
         del self.RenderArgumentCallback
 
     def CheckExitCode( self, exitCode ):
         print("check code")
         if exitCode != 0:

@@ -36,20 +37,20 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
                 self.LogInfo( "Renderer reported an error with error code 100. This will be ignored, since the option to ignore it is specified in the Job Properties." )
             else:
                 self.FailRender( "Renderer returned non-zero error code %d. Check the renderer's output." % exitCode )
 
     def InitializeProcess( self ):
         self.PluginType = PluginType.Simple
         self.StdoutHandling = True
         self.PopupHandling = True
 
         self.AddStdoutHandlerCallback( "Rendered frame ([0-9]+)" ).HandleCallback += self.HandleStdoutProgress
 
     def HandleStdoutProgress( self ):
         startFrame = self.GetStartFrame()
         endFrame = self.GetEndFrame()
         if( endFrame - startFrame + 1 != 0 ):
             self.SetProgress( 100 * ( int(self.GetRegexMatch(1)) - startFrame + 1 ) / ( endFrame - startFrame + 1 ) )
 
     def RenderExecutable( self ):
         version = int( self.GetPluginInfoEntry( "Version" ) )
         exe = ""

@@ -58,7 +59,7 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
         if( exe == "" ):
             self.FailRender( "Harmony render executable was not found in the configured separated list \"" + exeList + "\". The path to the render executable can be configured from the Plugin Configuration in the Deadline Monitor." )
         return exe
 
     def RenderArgument( self ):
         renderArguments = "-batch"
 

@@ -72,20 +73,20 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
         resolutionX = self.GetIntegerPluginInfoEntryWithDefault( "ResolutionX", -1 )
         resolutionY = self.GetIntegerPluginInfoEntryWithDefault( "ResolutionY", -1 )
         fov = self.GetFloatPluginInfoEntryWithDefault( "FieldOfView", -1 )
 
         if resolutionX > 0 and resolutionY > 0 and fov > 0:
             renderArguments += " -res " + str( resolutionX ) + " " + str( resolutionY ) + " " + str( fov )
 
         camera = self.GetPluginInfoEntryWithDefault( "Camera", "" )
 
         if not camera == "":
             renderArguments += " -camera " + camera
 
         startFrame = str( self.GetStartFrame() )
         endFrame = str( self.GetEndFrame() )
 
         renderArguments += " -frames " + startFrame + " " + endFrame
 
         if not self.GetBooleanPluginInfoEntryWithDefault( "IsDatabase", False ):
             sceneFilename = self.GetPluginInfoEntryWithDefault( "SceneFile", self.GetDataFilename() )
             sceneFilename = RepositoryUtils.CheckPathMapping( sceneFilename )

@@ -99,12 +100,12 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
             renderArguments += " -scene " + scene
             version = self.GetPluginInfoEntryWithDefault( "SceneVersion", "" )
             renderArguments += " -version " + version
 
         #tempSceneDirectory = self.CreateTempDirectory( "thread" + str(self.GetThreadNumber()) )
-        #preRenderScript = 
+        #preRenderScript =
         rendernodeNum = 0
         scriptBuilder = StringBuilder()
 
         while True:
             nodeName = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "Node", "" )
             if nodeName == "":

@@ -115,35 +116,35 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
             nodeLeadingZero = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "LeadingZero", "" )
             nodeFormat = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "Format", "" )
             nodeStartFrame = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "StartFrame", "" )
 
             if not nodePath == "":
                 scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"drawingName\", 1, \"" + nodePath + "\" );")
 
             if not nodeLeadingZero == "":
                 scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"leadingZeros\", 1, \"" + nodeLeadingZero + "\" );")
 
             if not nodeFormat == "":
                 scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"drawingType\", 1, \"" + nodeFormat + "\" );")
 
             if not nodeStartFrame == "":
                 scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"start\", 1, \"" + nodeStartFrame + "\" );")
 
             if nodeType == "Movie":
                 nodePath = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "Path", "" )
                 if not nodePath == "":
                     scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"moviePath\", 1, \"" + nodePath + "\" );")
 
             rendernodeNum += 1
 
         tempDirectory = self.CreateTempDirectory( "thread" + str(self.GetThreadNumber()) )
         preRenderScriptName = Path.Combine( tempDirectory, "preRenderScript.txt" )
 
         File.WriteAllText( preRenderScriptName, scriptBuilder.ToString() )
 
         preRenderInlineScript = self.GetPluginInfoEntryWithDefault( "PreRenderInlineScript", "" )
         if preRenderInlineScript:
             renderArguments += " -preRenderInlineScript \"" + preRenderInlineScript +"\""
 
         renderArguments += " -preRenderScript \"" + preRenderScriptName +"\""
 
         return renderArguments
@@ -38,6 +38,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
     for publish process.
     """
     def __init__(self):
+        super().__init__()
         self.InitializeProcessCallback += self.InitializeProcess
         self.RenderExecutableCallback += self.RenderExecutable
         self.RenderArgumentCallback += self.RenderArgument

@@ -107,7 +108,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
                 "Scanning for compatible requested "
                 f"version {requested_version}"))
             dir_list = self.GetConfigEntry("OpenPypeInstallationDirs")
 
             # clean '\ ' for MacOS pasting
             if platform.system().lower() == "darwin":
                 dir_list = dir_list.replace("\\ ", " ")
@@ -249,6 +249,7 @@ class OpenPypeTileAssembler(DeadlinePlugin):
 
     def __init__(self):
         """Init."""
+        super().__init__()
         self.InitializeProcessCallback += self.initialize_process
         self.RenderExecutableCallback += self.render_executable
         self.RenderArgumentCallback += self.render_argument
@@ -353,7 +353,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
         status_name = asset_version_data.pop("status_name", None)
 
         # Try query asset version by criteria (asset id and version)
-        version = asset_version_data.get("version") or 0
+        version = asset_version_data.get("version") or "0"
         asset_version_entity = self._query_asset_version(
             session, version, asset_id
         )
@@ -139,7 +139,7 @@ def get_transferable_representations(instance):
    to_transfer = []
 
    for representation in instance.data.get("representations", []):
-        if "publish_on_farm" not in representation.get("tags"):
+        if "publish_on_farm" not in representation.get("tags", []):
            continue
 
        trans_rep = representation.copy()

@@ -265,8 +265,7 @@ def create_skeleton_instance(
        instance_skeleton_data[v] = instance.data.get(v)
 
    representations = get_transferable_representations(instance)
-    instance_skeleton_data["representations"] = []
-    instance_skeleton_data["representations"] += representations
+    instance_skeleton_data["representations"] = representations
 
    persistent = instance.data.get("stagingDir_persistent") is True
    instance_skeleton_data["stagingDir_persistent"] = persistent
@@ -53,8 +53,8 @@ class ExtractBurnin(publish.Extractor):
         "flame",
         "houdini",
         "max",
-        "blender"
-        # "resolve"
+        "blender",
+        "unreal"
     ]
 
     optional = True
@@ -128,7 +128,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         if thumbnail_created:
             return full_output_path
 
-        self.log.warning("Thumbanil has not been created.")
+        self.log.warning("Thumbnail has not been created.")
 
     def _instance_has_thumbnail(self, instance):
         if "representations" not in instance.data:

@@ -147,6 +147,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         oiio_cmd = get_oiio_tool_args(
             "oiiotool",
             "-a", src_path,
+            "--ch", "R,G,B",
             "-o", dst_path
         )
         self.log.info("Running: {}".format(" ".join(oiio_cmd)))
@@ -343,6 +343,7 @@ class TextAttrWidget(_BaseAttrDefWidget):
         return self._input_widget.text()
 
     def set_value(self, value, multivalue=False):
+        block_signals = False
         if multivalue:
             set_value = set(value)
             if None in set_value:

@@ -352,13 +353,18 @@ class TextAttrWidget(_BaseAttrDefWidget):
             if len(set_value) == 1:
                 value = tuple(set_value)[0]
             else:
+                block_signals = True
                 value = "< Multiselection >"
 
         if value != self.current_value():
+            if block_signals:
+                self._input_widget.blockSignals(True)
             if self.multiline:
                 self._input_widget.setPlainText(value)
             else:
                 self._input_widget.setText(value)
+            if block_signals:
+                self._input_widget.blockSignals(False)
 
 
 class BoolAttrWidget(_BaseAttrDefWidget):

@@ -391,7 +397,9 @@ class BoolAttrWidget(_BaseAttrDefWidget):
             set_value.add(self.attr_def.default)
 
         if len(set_value) > 1:
+            self._input_widget.blockSignals(True)
             self._input_widget.setCheckState(QtCore.Qt.PartiallyChecked)
+            self._input_widget.blockSignals(False)
             return
         value = tuple(set_value)[0]
 
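The widget changes above guard the "< Multiselection >" placeholder with blockSignals so the temporary text never reaches value-change listeners. A minimal stand-alone sketch of the same pattern, assuming PySide2 (the project itself goes through its own Qt wrapper):

from PySide2 import QtWidgets

app = QtWidgets.QApplication([])

line_edit = QtWidgets.QLineEdit()
line_edit.textChanged.connect(lambda text: print("changed:", text))

line_edit.setText("value")               # emits textChanged

line_edit.blockSignals(True)
line_edit.setText("< Multiselection >")  # placeholder only, no signal emitted
line_edit.blockSignals(False)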
@@ -168,7 +168,7 @@ class OverviewWidget(QtWidgets.QFrame):
     def make_sure_animation_is_finished(self):
         if self._change_anim.state() == QtCore.QAbstractAnimation.Running:
             self._change_anim.stop()
-            self._on_change_anim_finished()
+        self._on_change_anim_finished()
 
     def set_state(self, new_state, animate):
         if new_state == self._current_state: