Mirror of https://github.com/ynput/ayon-core.git, synced 2026-01-01 08:24:53 +01:00
Merge branch 'develop' into feature/OP-5871_Max-Review-feature
Commit 582bdb51a2
21 changed files with 259 additions and 74 deletions
@@ -134,6 +134,27 @@ def append_user_scripts():
         traceback.print_exc()
 
 
+def set_app_templates_path():
+    # Blender requires the app templates to be in `BLENDER_USER_SCRIPTS`.
+    # After running Blender, we set that variable to our custom path, so
+    # that the user can use their custom app templates.
+
+    # We look among the scripts paths for one of the paths that contains
+    # the app templates. The path must contain the subfolder
+    # `startup/bl_app_templates_user`.
+    paths = os.environ.get("OPENPYPE_BLENDER_USER_SCRIPTS").split(os.pathsep)
+
+    app_templates_path = None
+    for path in paths:
+        if os.path.isdir(
+                os.path.join(path, "startup", "bl_app_templates_user")):
+            app_templates_path = path
+            break
+
+    if app_templates_path and os.path.isdir(app_templates_path):
+        os.environ["BLENDER_USER_SCRIPTS"] = app_templates_path
+
+
 def imprint(node: bpy.types.bpy_struct_meta_idprop, data: Dict):
     r"""Write `data` to `node` as userDefined attributes
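The new helper scans OPENPYPE_BLENDER_USER_SCRIPTS for the first entry that contains startup/bl_app_templates_user and points BLENDER_USER_SCRIPTS at it. A minimal usage sketch, with hypothetical directory values:

    import os

    # Hypothetical paths, for illustration only.
    os.environ["OPENPYPE_BLENDER_USER_SCRIPTS"] = os.pathsep.join([
        "/studio/scripts",          # no app templates here, skipped
        "/studio/blender_scripts",  # has startup/bl_app_templates_user
    ])
    set_app_templates_path()
    # BLENDER_USER_SCRIPTS now points at "/studio/blender_scripts"

Worth noting: the helper assumes OPENPYPE_BLENDER_USER_SCRIPTS is set; os.environ.get(...) returns None otherwise and the .split() call would raise AttributeError.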
@@ -60,6 +60,7 @@ def install():
     register_creator_plugin_path(str(CREATE_PATH))
 
     lib.append_user_scripts()
+    lib.set_app_templates_path()
 
     register_event_callback("new", on_new)
     register_event_callback("open", on_open)
@@ -207,7 +207,9 @@ class MaxCreator(Creator, MaxCreatorBase):
 
         """
         for instance in instances:
-            if instance_node := rt.GetNodeByName(instance.data.get("instance_node")):  # noqa
+            instance_node = rt.GetNodeByName(
+                instance.data.get("instance_node"))
+            if instance_node:
                 count = rt.custAttributes.count(instance_node)
                 rt.custAttributes.delete(instance_node, count)
                 rt.Delete(instance_node)
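This hunk, and the similar ones below (CreateRender, ValidateCameraContent, ValidateModelContent, ValidatePointCloud), replaces an assignment expression with a plain assignment followed by `if`. The walrus operator requires Python 3.8+, so the rewrite presumably keeps these plugins importable on hosts running an older interpreter; the two forms are otherwise equivalent. A sketch with an illustrative name and helper:

    # Python 3.8+ only:
    if instance_node := rt.GetNodeByName(node_name):
        remove(instance_node)

    # Works on any Python 3 version, as used after this change:
    instance_node = rt.GetNodeByName(node_name)
    if instance_node:
        remove(instance_node)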
@@ -24,7 +24,8 @@ class CreateRender(plugin.MaxCreator):
             instance_data,
             pre_create_data)
         container_name = instance.data.get("instance_node")
-        if sel_obj := self.selected_nodes:
+        sel_obj = self.selected_nodes
+        if sel_obj:
             # set viewport camera for rendering(mandatory for deadline)
             RenderSettings(self.project_settings).set_render_camera(sel_obj)
             # set output paths for rendering(mandatory for deadline)
@@ -18,7 +18,8 @@ class ValidateCameraContent(pyblish.api.InstancePlugin):
                     "$Physical_Camera", "$Target"]
 
     def process(self, instance):
-        if invalid := self.get_invalid(instance):  # noqa
+        invalid = self.get_invalid(instance)
+        if invalid:
             raise PublishValidationError(("Camera instance must only include"
                                           "camera (and camera target). "
                                           f"Invalid content {invalid}"))
@@ -18,7 +18,8 @@ class ValidateModelContent(pyblish.api.InstancePlugin):
     label = "Model Contents"
 
     def process(self, instance):
-        if invalid := self.get_invalid(instance):  # noqa
+        invalid = self.get_invalid(instance)
+        if invalid:
             raise PublishValidationError(("Model instance must only include"
                                           "Geometry and Editable Mesh. "
                                           f"Invalid types on: {invalid}"))
@@ -35,12 +35,16 @@ class ValidatePointCloud(pyblish.api.InstancePlugin):
 
         """
         report = []
-        if invalid := self.get_tyflow_object(instance):  # noqa
-            report.append(f"Non tyFlow object found: {invalid}")
-
-        if invalid := self.get_tyflow_operator(instance):  # noqa
-            report.append(
-                f"tyFlow ExportParticle operator not found: {invalid}")
+        invalid_object = self.get_tyflow_object(instance)
+        if invalid_object:
+            report.append(f"Non tyFlow object found: {invalid_object}")
+
+        invalid_operator = self.get_tyflow_operator(instance)
+        if invalid_operator:
+            report.append((
+                "tyFlow ExportParticle operator not "
+                f"found: {invalid_operator}"))
 
         if self.validate_export_mode(instance):
             report.append("The export mode is not at PRT")

@@ -49,9 +53,10 @@ class ValidatePointCloud(pyblish.api.InstancePlugin):
             report.append(("tyFlow Partition setting is "
                            "not at the default value"))
 
-        if invalid := self.validate_custom_attribute(instance):  # noqa
+        invalid_attribute = self.validate_custom_attribute(instance)
+        if invalid_attribute:
             report.append(("Custom Attribute not found "
-                           f":{invalid}"))
+                           f":{invalid_attribute}"))
 
         if report:
             raise PublishValidationError(f"{report}")
@@ -1,4 +1,5 @@
 import os
+import shutil
 
 import maya.cmds as cmds
 import xgenm

@@ -116,8 +117,8 @@ class XgenLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
     def update(self, container, representation):
         """Workflow for updating Xgen.
 
-        - Copy and potentially overwrite the workspace .xgen file.
         - Export changes to delta file.
+        - Copy and overwrite the workspace .xgen file.
         - Set collection attributes to not include delta files.
         - Update xgen maya file reference.
         - Apply the delta file changes.

@@ -130,6 +131,10 @@ class XgenLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
         There is an implicit increment of the xgen and delta files, due to
         using the workfile basename.
         """
+        # Storing current description to try and maintain later.
+        current_description = (
+            xgenm.xgGlobal.DescriptionEditor.currentDescription()
+        )
+
         container_node = container["objectName"]
         members = get_container_members(container_node)

@@ -160,6 +165,7 @@ class XgenLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
             data_path
         )
         data = {"xgProjectPath": project_path, "xgDataPath": data_path}
+        shutil.copy(new_xgen_file, xgen_file)
         write_xgen_file(data, xgen_file)
 
         attribute_data = {

@@ -171,3 +177,11 @@ class XgenLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
         super().update(container, representation)
 
         xgenm.applyDelta(xgen_palette.replace("|", ""), xgd_file)
+
+        # Restore current selected description if it exists.
+        if cmds.objExists(current_description):
+            xgenm.xgGlobal.DescriptionEditor.setCurrentDescription(
+                current_description
+            )
+        # Full UI refresh.
+        xgenm.xgGlobal.DescriptionEditor.refresh("Full")
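The loader keeps user edits across version updates by round-tripping them through an XGen delta file: export current edits, swap the referenced .xgen file, then re-apply. A condensed sketch of that round trip, with illustrative palette and file names (xgenm is Maya's XGen Python API):

    import xgenm

    palette = "collection1"
    xgd_file = "/tmp/collection1.xgd"

    xgenm.createDelta(palette, xgd_file)   # 1. save local edits as a delta
    # 2. copy/overwrite the workspace .xgen file, update the reference
    xgenm.applyDelta(palette, xgd_file)    # 3. re-apply the saved edits

The added save/restore of currentDescription keeps the artist's selected description across the update, per the inline comments in the hunk above.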
@@ -18,18 +18,14 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
         for objset in objsets:
             objset = str(objset)
             members = cmds.sets(objset, query=True)
+            members = cmds.ls(members, long=True)
             if members is None:
                 self.log.warning("Skipped empty instance: \"%s\" " % objset)
                 continue
             if objset.endswith("content_SET"):
-                members = cmds.ls(members, long=True)
-                children = get_all_children(members)
-                instance.data["contentMembers"] = children
-                self.log.debug("content members: {}".format(children))
-            elif objset.endswith("proxy_SET"):
-                set_members = get_all_children(cmds.ls(members, long=True))
-                instance.data["proxy"] = set_members
-                self.log.debug("proxy members: {}".format(set_members))
+                instance.data["contentMembers"] = self.get_hierarchy(members)
+            if objset.endswith("proxy_SET"):
+                instance.data["proxy"] = self.get_hierarchy(members)
 
         # Use camera in object set if present else default to render globals
         # camera.

@@ -48,3 +44,13 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
             self.log.debug("No renderable cameras found.")
 
         self.log.debug("data: {}".format(instance.data))
+
+    def get_hierarchy(self, nodes):
+        """Return nodes with all their children"""
+        nodes = cmds.ls(nodes, long=True)
+        if not nodes:
+            return []
+        children = get_all_children(nodes)
+        # Make sure nodes merged with children only
+        # contains unique entries
+        return list(set(nodes + children))
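The two set branches now share the new get_hierarchy helper. One behavioural detail worth noting: list(set(...)) deduplicates but does not preserve order. A toy run, assuming get_all_children returns descendant long names:

    nodes = ["|char_GRP", "|char_GRP|body_GEO"]
    children = ["|char_GRP|body_GEO", "|char_GRP|body_GEO|body_GEOShape"]
    list(set(nodes + children))
    # -> the unique union of the three paths, in arbitrary order

If a stable order ever mattered here, list(dict.fromkeys(nodes + children)) would preserve first-seen order instead.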
@@ -30,12 +30,12 @@ class CollectXgen(pyblish.api.InstancePlugin):
         if data["xgmPalettes"]:
             data["xgmPalette"] = data["xgmPalettes"][0]
 
-        data["xgenConnections"] = {}
+        data["xgenConnections"] = set()
         for node in data["xgmSubdPatches"]:
-            data["xgenConnections"][node] = {}
-            for attr in ["transform", "geometry"]:
-                input = get_attribute_input("{}.{}".format(node, attr))
-                data["xgenConnections"][node][attr] = input
+            connected_transform = get_attribute_input(
+                node + ".transform"
+            ).split(".")[0]
+            data["xgenConnections"].add(connected_transform)
 
         # Collect all files under palette root as resources.
         import xgenm
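This changes the shape of instance.data["xgenConnections"], which the ExtractXgen and ValidateXgen hunks further down consume. Before and after, with illustrative plug and node names:

    # Before: mapping keyed by subd patch, holding input plugs per attribute.
    {"xgmSubdPatch1": {"transform": "pPlane1.worldMatrix",
                       "geometry": "pPlaneShape1.outMesh"}}

    # After: a flat set of the connected transform node names,
    # deduplicated by the set itself.
    {"pPlane1"}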
@@ -109,6 +109,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
             return
 
         kwargs["filename"] = file_path.replace(".ass", "_proxy.ass")
+
         filenames, _ = self._extract(
             instance.data["proxy"], attribute_data, kwargs
         )
@@ -57,7 +57,7 @@ class ExtractWorkfileXgen(publish.Extractor):
                 continue
 
             render_start_frame = instance.data["frameStart"]
-            render_end_frame = instance.data["frameStart"]
+            render_end_frame = instance.data["frameEnd"]
 
             if start_frame is None:
                 start_frame = render_start_frame
@@ -51,11 +51,9 @@ class ExtractXgen(publish.Extractor):
         with delete_after() as delete_bin:
             duplicate_nodes = []
             # Collect nodes to export.
-            for _, connections in instance.data["xgenConnections"].items():
-                transform_name = connections["transform"].split(".")[0]
-
+            for node in instance.data["xgenConnections"]:
                 # Duplicate_transform subd patch geometry.
-                duplicate_transform = cmds.duplicate(transform_name)[0]
+                duplicate_transform = cmds.duplicate(node)[0]
                 delete_bin.append(duplicate_transform)
 
                 # Discard the children.

@@ -88,6 +86,18 @@ class ExtractXgen(publish.Extractor):
 
             delete_bin.append(palette)
 
+            # Copy shading assignments.
+            nodes = (
+                instance.data["xgmDescriptions"] +
+                instance.data["xgmSubdPatches"]
+            )
+            for node in nodes:
+                target_node = node.split(":")[-1]
+                shading_engine = cmds.listConnections(
+                    node, type="shadingEngine"
+                )[0]
+                cmds.sets(target_node, edit=True, forceElement=shading_engine)
+
             # Export duplicated palettes.
             xgenm.exportPalette(palette, xgen_path)
@@ -61,9 +61,7 @@ class ValidateXgen(pyblish.api.InstancePlugin):
         # We need a namespace else there will be a naming conflict when
         # extracting because of stripping namespaces and parenting to world.
         node_names = [instance.data["xgmPalette"]]
-        for _, connections in instance.data["xgenConnections"].items():
-            node_names.append(connections["transform"].split(".")[0])
-
+        node_names.extend(instance.data["xgenConnections"])
         non_namespaced_nodes = [n for n in node_names if ":" not in n]
         if non_namespaced_nodes:
             raise PublishValidationError(
@@ -2069,6 +2069,35 @@ class WorkfileSettings(object):
             log.debug("nuke.root()['{}'] changed to: {}".format(
                 knob, value_))
 
+        # set ocio config path
+        if config_data:
+            current_ocio_path = os.getenv("OCIO")
+            if current_ocio_path != config_data["path"]:
+                message = """
+It seems like there's a mismatch between the OCIO config path set in your Nuke
+settings and the actual path set in your OCIO environment.
+
+To resolve this, please follow these steps:
+1. Close Nuke if it's currently open.
+2. Reopen Nuke.
+
+Please note the paths for your reference:
+
+- The OCIO environment path currently set:
+`{env_path}`
+
+- The path in your current Nuke settings:
+`{settings_path}`
+
+Reopening Nuke should synchronize these paths and resolve any discrepancies.
+"""
+                nuke.message(
+                    message.format(
+                        env_path=current_ocio_path,
+                        settings_path=config_data["path"]
+                    )
+                )
+
     def set_writes_colorspace(self):
         ''' Adds correct colorspace to write node dict
@@ -1,9 +1,14 @@
 """ OpenPype custom script for setting up write nodes for non-publish """
 import os
 import nuke
-from openpype.hosts.nuke.api.lib import set_node_knobs_from_settings
+import nukescripts
+from openpype.pipeline import Anatomy
+from openpype.hosts.nuke.api.lib import (
+    set_node_knobs_from_settings,
+    get_nuke_imageio_settings
+)
 
 
 frame_padding = 5
 temp_rendering_path_template = (
     "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")

@@ -53,24 +58,94 @@ knobs_setting = {
 }
 
 
-def main():
-    write_selected_nodes = [
-        s for s in nuke.selectedNodes() if s.Class() == "Write"]
+class WriteNodeKnobSettingPanel(nukescripts.PythonPanel):
+    """ Write Node's Knobs Settings Panel """
+    def __init__(self):
+        nukescripts.PythonPanel.__init__(self, "Set Knobs Value(Write Node)")
 
-    ext = None
-    knobs = knobs_setting["knobs"]
-    for knob in knobs:
-        if knob["name"] == "file_type":
-            ext = knob["value"]
-    for w in write_selected_nodes:
-        # data for mapping the path
-        data = {
-            "work": os.getenv("AVALON_WORKDIR"),
-            "subset": w["name"].value(),
-            "frame": "#" * frame_padding,
-            "ext": ext
-        }
-        file_path = temp_rendering_path_template.format(**data)
-        file_path = file_path.replace("\\", "/")
-        w["file"].setValue(file_path)
-        set_node_knobs_from_settings(w, knobs)
+        preset_name, _ = self.get_node_knobs_setting()
+        # create knobs
+
+        self.selected_preset_name = nuke.Enumeration_Knob(
+            'preset_selector', 'presets', preset_name)
+        # add knobs to panel
+        self.addKnob(self.selected_preset_name)
+
+    def process(self):
+        """ Process the panel values. """
+        write_selected_nodes = [
+            selected_nodes for selected_nodes in nuke.selectedNodes()
+            if selected_nodes.Class() == "Write"]
+
+        selected_preset = self.selected_preset_name.value()
+        ext = None
+        knobs = knobs_setting["knobs"]
+        preset_name, node_knobs_presets = (
+            self.get_node_knobs_setting(selected_preset)
+        )
+
+        if selected_preset and preset_name:
+            if not node_knobs_presets:
+                nuke.message(
+                    "No knobs value found in subset group.."
+                    "\nDefault setting will be used..")
+            else:
+                knobs = node_knobs_presets
+
+        ext_knob_list = [knob for knob in knobs if knob["name"] == "file_type"]
+        if not ext_knob_list:
+            nuke.message(
+                "ERROR: No file type found in the subset's knobs."
+                "\nPlease add one to complete setting up the node")
+            return
+        else:
+            for knob in ext_knob_list:
+                ext = knob["value"]
+
+        anatomy = Anatomy()
+
+        frame_padding = int(
+            anatomy.templates["render"].get(
+                "frame_padding"
+            )
+        )
+        for write_node in write_selected_nodes:
+            # data for mapping the path
+            data = {
+                "work": os.getenv("AVALON_WORKDIR"),
+                "subset": write_node["name"].value(),
+                "frame": "#" * frame_padding,
+                "ext": ext
+            }
+            file_path = temp_rendering_path_template.format(**data)
+            file_path = file_path.replace("\\", "/")
+            write_node["file"].setValue(file_path)
+            set_node_knobs_from_settings(write_node, knobs)
+
+    def get_node_knobs_setting(self, selected_preset=None):
+        preset_name = []
+        knobs_nodes = []
+        settings = [
+            node_settings for node_settings
+            in get_nuke_imageio_settings()["nodes"]["overrideNodes"]
+            if node_settings["nukeNodeClass"] == "Write"
+            and node_settings["subsets"]
+        ]
+        if not settings:
+            return
+
+        for i, _ in enumerate(settings):
+            if selected_preset in settings[i]["subsets"]:
+                knobs_nodes = settings[i]["knobs"]
+
+        for setting in settings:
+            for subset in setting["subsets"]:
+                preset_name.append(subset)
+
+        return preset_name, knobs_nodes
+
+
+def main():
+    p_ = WriteNodeKnobSettingPanel()
+    if p_.showModalDialog():
+        print(p_.process())
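The script is reshaped from a run-once main() into a nukescripts.PythonPanel subclass so the user can pick a per-subset knob preset before anything is applied. The modal-dialog pattern it follows, reduced to a sketch (Nuke-only API, runs inside Nuke):

    import nuke
    import nukescripts

    class ExamplePanel(nukescripts.PythonPanel):
        def __init__(self):
            nukescripts.PythonPanel.__init__(self, "Example")
            self.choice = nuke.Enumeration_Knob("choice", "choice", ["a", "b"])
            self.addKnob(self.choice)

    panel = ExamplePanel()
    if panel.showModalDialog():   # True once the user confirms the dialog
        print(panel.choice.value())

One caveat visible in the new code: get_node_knobs_setting returns None when no matching Write-node override settings exist, so the tuple unpacking in __init__ would raise TypeError in that case.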
@@ -21,6 +21,7 @@ from openpype.pipeline import (
 from openpype.tests.lib import is_in_tests
 from openpype.pipeline.farm.patterning import match_aov_pattern
 from openpype.lib import is_running_from_build
+from openpype.pipeline import publish
 
 
 def get_resources(project_name, version, extension=None):

@@ -79,7 +80,8 @@ def get_resource_files(resources, frame_range=None):
     return list(res_collection)
 
 
-class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
+class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
+                                publish.ColormanagedPyblishPluginMixin):
     """Process Job submitted on farm.
 
     These jobs are dependent on a deadline or muster job
@@ -598,7 +600,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         self.log.debug("instances:{}".format(instances))
         return instances
 
-    def _get_representations(self, instance, exp_files, do_not_add_review):
+    def _get_representations(self, instance_data, exp_files,
+                             do_not_add_review):
         """Create representations for file sequences.
 
         This will return representations of expected files if they are not

@@ -606,7 +609,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         most cases, but if not - we create representation from each of them.
 
         Arguments:
-            instance (dict): instance data for which we are
+            instance_data (dict): instance.data for which we are
                 setting representations
             exp_files (list): list of expected files
             do_not_add_review (bool): explicitly skip review
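The rename from instance to instance_data in the hunks that follow matches what the method actually receives: the plain instance.data dict that was serialized for the farm job, not a pyblish Instance object. Access-pattern reminder, with hypothetical values:

    instance_data = {"useSequenceForReview": True, "fps": 25.0}

    instance_data["useSequenceForReview"]      # KeyError if the key is absent
    instance_data.get("multipartExr", False)   # tolerant lookup with default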
@@ -628,9 +631,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             # expected files contains more explicitly and from what
             # should be review made.
             # - "review" tag is never added when is set to 'False'
-            if instance["useSequenceForReview"]:
+            if instance_data["useSequenceForReview"]:
                 # toggle preview on if multipart is on
-                if instance.get("multipartExr", False):
+                if instance_data.get("multipartExr", False):
                     self.log.debug(
                         "Adding preview tag because its multipartExr"
                     )

@@ -655,8 +658,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                 " This may cause issues on farm."
             ).format(staging))
 
-        frame_start = int(instance.get("frameStartHandle"))
-        if instance.get("slate"):
+        frame_start = int(instance_data.get("frameStartHandle"))
+        if instance_data.get("slate"):
             frame_start -= 1
 
         preview = preview and not do_not_add_review
@@ -665,10 +668,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "ext": ext,
             "files": [os.path.basename(f) for f in list(collection)],
             "frameStart": frame_start,
-            "frameEnd": int(instance.get("frameEndHandle")),
+            "frameEnd": int(instance_data.get("frameEndHandle")),
             # If expectedFile are absolute, we need only filenames
             "stagingDir": staging,
-            "fps": instance.get("fps"),
+            "fps": instance_data.get("fps"),
             "tags": ["review"] if preview else [],
         }
 

@@ -676,17 +679,17 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         if ext in self.skip_integration_repre_list:
             rep["tags"].append("delete")
 
-        if instance.get("multipartExr", False):
+        if instance_data.get("multipartExr", False):
             rep["tags"].append("multipartExr")
 
         # support conversion from tiled to scanline
-        if instance.get("convertToScanline"):
+        if instance_data.get("convertToScanline"):
             self.log.info("Adding scanline conversion.")
             rep["tags"].append("toScanline")
 
         representations.append(rep)
 
-        self._solve_families(instance, preview)
+        self._solve_families(instance_data, preview)
 
         # add remainders as representations
         for remainder in remainders:
@@ -717,13 +720,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             preview = preview and not do_not_add_review
             if preview:
                 rep.update({
-                    "fps": instance.get("fps"),
+                    "fps": instance_data.get("fps"),
                     "tags": ["review"]
                 })
-            self._solve_families(instance, preview)
+            self._solve_families(instance_data, preview)
 
             already_there = False
-            for repre in instance.get("representations", []):
+            for repre in instance_data.get("representations", []):
                 # might be added explicitly before by publish_on_farm
                 already_there = repre.get("files") == rep["files"]
                 if already_there:
@@ -733,6 +736,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             if not already_there:
                 representations.append(rep)
 
+        for rep in representations:
+            # inject colorspace data
+            self.set_representation_colorspace(
+                rep, self.context,
+                colorspace=instance_data["colorspace"]
+            )
+
         return representations
 
     def _solve_families(self, instance, preview=False):
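set_representation_colorspace comes from the ColormanagedPyblishPluginMixin added to the plugin's bases earlier in this diff. As a hedged sketch of its effect (key names as commonly produced by OpenPype's color management, values illustrative), each representation ends up carrying colorspace metadata for the integrator:

    rep = {
        "name": "exr",
        "files": ["shot.0001.exr", "shot.0002.exr"],
        "colorspaceData": {
            "colorspace": "ACES - ACEScg",
            "config": {"path": "/pipeline/config.ocio"},
        },
    }

The "colorspace" value itself travels with the serialized job payload via the instance-data hunk below.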
@@ -861,7 +871,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "jobBatchName": data.get("jobBatchName", ""),
             "useSequenceForReview": data.get("useSequenceForReview", True),
             # map inputVersions `ObjectId` -> `str` so json supports it
-            "inputVersions": list(map(str, data.get("inputVersions", [])))
+            "inputVersions": list(map(str, data.get("inputVersions", []))),
+            "colorspace": instance.data.get("colorspace")
         }
 
         # skip locking version if we are creating v01
@@ -375,7 +375,11 @@ def get_imageio_config(
     # This is for backward compatibility.
     # TODO: in future rewrite this to be more explicit
     activate_host_color_management = imageio_host.get(
-        "activate_host_color_management", True)
+        "activate_host_color_management")
+
+    # TODO: remove this in future - backward compatibility
+    if activate_host_color_management is None:
+        activate_host_color_management = host_ocio_config.get("enabled", False)
 
     if not activate_host_color_management:
         # if host settings are disabled return False because
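The change swaps a hard default of True for a None sentinel, so an unset setting can fall back to the legacy host_ocio_config["enabled"] flag instead of being force-enabled. The pattern in isolation, with illustrative dict names:

    activate = settings.get("activate_host_color_management")  # None if unset
    if activate is None:
        # key absent -> defer to the legacy flag (backward compatibility)
        activate = legacy_config.get("enabled", False)

Practical effect: hosts that never defined the new setting keep their old enabled/disabled behaviour.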
@@ -184,6 +184,11 @@ class ExtractOIIOTranscode(publish.Extractor):
                 if tag == "review":
                     added_review = True
 
+            # If there is only 1 file outputted then convert list to
+            # string, cause that'll indicate that its not a sequence.
+            if len(new_repre["files"]) == 1:
+                new_repre["files"] = new_repre["files"][0]
+
             new_representations.append(new_repre)
             added_representations = True
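This enforces the representation convention the inline comment describes: "files" is a list for frame sequences but a plain string for a single file. Illustrative values:

    new_repre = {"files": ["review.mp4"]}
    if len(new_repre["files"]) == 1:
        new_repre["files"] = new_repre["files"][0]   # -> "review.mp4"

    # A sequence keeps the list form:
    # {"files": ["shot.0001.exr", "shot.0002.exr"]}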
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring Pype version."""
-__version__ = "3.15.11-nightly.3"
+__version__ = "3.15.11-nightly.4"