Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit 1cc94537ba: Merge branch 'develop' into enhancement/deadline-remove-toBeRenderedOn

54 changed files with 1082 additions and 250 deletions
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 6 changes)
@@ -35,6 +35,9 @@ body:
       label: Version
       description: What version are you running? Look to OpenPype Tray
       options:
+        - 3.16.5-nightly.3
+        - 3.16.5-nightly.2
+        - 3.16.5-nightly.1
        - 3.16.4
        - 3.16.4-nightly.3
        - 3.16.4-nightly.2
@@ -132,9 +135,6 @@ body:
        - 3.14.8
        - 3.14.8-nightly.4
        - 3.14.8-nightly.3
-        - 3.14.8-nightly.2
-        - 3.14.8-nightly.1
-        - 3.14.7
     validations:
       required: true
   - type: dropdown

README.md
@@ -62,7 +62,7 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v
 #### Clone repository:
 ```sh
-git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git
+git clone --recurse-submodules git@github.com:ynput/OpenPype.git
 ```

 #### To build OpenPype:
@@ -144,6 +144,10 @@ sudo ./tools/docker_build.sh centos7

 If all is successful, you'll find built OpenPype in `./build/` folder.

+Docker build can be also started from Windows machine, just use `./tools/docker_build.ps1` instead of shell script.
+
+This could be used even for building linux build (with argument `centos7` or `debian`)
+
 #### Manual build
 You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled.
@@ -83,10 +83,10 @@ def _get_subsets(
         project_name,
         subset_ids,
         subset_names,
-        folder_ids,
-        names_by_folder_ids,
-        active,
-        fields
+        folder_ids=folder_ids,
+        names_by_folder_ids=names_by_folder_ids,
+        active=active,
+        fields=fields,
     ):
         yield convert_v4_subset_to_v3(subset)
@@ -45,6 +45,9 @@ class OCIOEnvHook(PreLaunchHook):
         if config_data:
             ocio_path = config_data["path"]

+            if self.host_name in ["nuke", "hiero"]:
+                ocio_path = ocio_path.replace("\\", "/")
+
             self.log.info(
                 f"Setting OCIO environment to config path: {ocio_path}")
@@ -22,9 +22,12 @@ log = logging.getLogger(__name__)
 JSON_PREFIX = "JSON:::"


-def get_asset_fps():
+def get_asset_fps(asset_doc=None):
     """Return current asset fps."""
-    return get_current_project_asset()["data"].get("fps")
+
+    if asset_doc is None:
+        asset_doc = get_current_project_asset(fields=["data.fps"])
+    return asset_doc["data"]["fps"]


 def set_id(node, unique_id, overwrite=False):
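Note: the new optional `asset_doc` argument lets callers that already hold the asset document skip a second database query. A minimal hedged sketch of both call styles; the surrounding names (`project_name`, `asset_name`, `get_asset_by_name`) are taken from the `reset_framerange` hunk below.

```python
# Without an argument the helper queries the current project asset itself:
fps = get_asset_fps()

# When the caller already fetched the document, passing it in avoids a
# redundant database round-trip:
asset_doc = get_asset_by_name(project_name, asset_name)
fps = get_asset_fps(asset_doc)
```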
@@ -472,14 +475,19 @@ def maintained_selection():


 def reset_framerange():
-    """Set frame range to current asset"""
+    """Set frame range and FPS to current asset"""

     # Get asset data
     project_name = get_current_project_name()
     asset_name = get_current_asset_name()
     # Get the asset ID from the database for the asset of current context
     asset_doc = get_asset_by_name(project_name, asset_name)
     asset_data = asset_doc["data"]

+    # Get FPS
+    fps = get_asset_fps(asset_doc)
+
     # Get Start and End Frames
     frame_start = asset_data.get("frameStart")
     frame_end = asset_data.get("frameEnd")
@@ -493,6 +501,9 @@ def reset_framerange():
     frame_start -= int(handle_start)
     frame_end += int(handle_end)

+    # Set frame range and FPS
+    print("Setting scene FPS to {}".format(int(fps)))
+    set_scene_fps(fps)
     hou.playbar.setFrameRange(frame_start, frame_end)
     hou.playbar.setPlaybackRange(frame_start, frame_end)
     hou.setFrame(frame_start)
@@ -25,7 +25,6 @@ from openpype.lib import (
     emit_event,
 )

-from .lib import get_asset_fps

 log = logging.getLogger("openpype.hosts.houdini")
@@ -385,11 +384,6 @@ def _set_context_settings():
         None
     """

-    # Set new scene fps
-    fps = get_asset_fps()
-    print("Setting scene FPS to %i" % fps)
-    lib.set_scene_fps(fps)
-
     lib.reset_framerange()
@@ -33,7 +33,7 @@ class CreateVDBCache(plugin.HoudiniCreator):
         }

         if self.selected_nodes:
-            parms["soppath"] = self.selected_nodes[0].path()
+            parms["soppath"] = self.get_sop_node_path(self.selected_nodes[0])

         instance_node.setParms(parms)
@@ -42,3 +42,63 @@ class CreateVDBCache(plugin.HoudiniCreator):
             hou.ropNodeTypeCategory(),
             hou.sopNodeTypeCategory()
         ]

+    def get_sop_node_path(self, selected_node):
+        """Get Sop Path of the selected node.
+
+        Although Houdini allows ObjNode path on `sop_path` for the
+        the ROP node, we prefer it set to the SopNode path explicitly.
+        """
+
+        # Allow sop level paths (e.g. /obj/geo1/box1)
+        if isinstance(selected_node, hou.SopNode):
+            self.log.debug(
+                "Valid SopNode selection, 'SOP Path' in ROP will"
+                " be set to '%s'.", selected_node.path()
+            )
+            return selected_node.path()
+
+        # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
+        # but do not allow other object level nodes types like cameras, etc.
+        elif isinstance(selected_node, hou.ObjNode) and \
+                selected_node.type().name() == "geo":
+
+            # Try to find output node.
+            sop_node = self.get_obj_output(selected_node)
+            if sop_node:
+                self.log.debug(
+                    "Valid ObjNode selection, 'SOP Path' in ROP will "
+                    "be set to the child path '%s'.", sop_node.path()
+                )
+                return sop_node.path()
+
+        self.log.debug(
+            "Selection isn't valid. 'SOP Path' in ROP will be empty."
+        )
+        return ""
+
+    def get_obj_output(self, obj_node):
+        """Try to find output node.
+
+        If any output nodes are present, return the output node with
+        the minimum 'outputidx'.
+        If no output nodes are present, return the node with display flag.
+        If no nodes are present at all, return None.
+        """
+
+        outputs = obj_node.subnetOutputs()
+
+        # if obj_node is empty
+        if not outputs:
+            return
+
+        # if obj_node has one output child whether its
+        # sop output node or a node with the render flag
+        elif len(outputs) == 1:
+            return outputs[0]
+
+        # if there are more than one, then it has multiple output nodes
+        # return the one with the minimum 'outputidx'
+        else:
+            return min(outputs,
+                       key=lambda node: node.evalParm('outputidx'))
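Note: a hedged sketch of the resolution order the new helper implements. The node paths below are hypothetical examples, not from the commit, and `creator` stands in for a `CreateVDBCache` instance.

```python
import hou

box = hou.node("/obj/geo1/box1")   # SOP node: used directly
geo = hou.node("/obj/geo1")        # ObjNode of type "geo": child output used
cam = hou.node("/obj/cam1")        # any other ObjNode: rejected

creator.get_sop_node_path(box)     # -> "/obj/geo1/box1"
creator.get_sop_node_path(geo)     # -> e.g. "/obj/geo1/output0" if present
creator.get_sop_node_path(cam)     # -> "" ('SOP Path' left empty)
```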
@@ -2,7 +2,19 @@
 <mainMenu>
   <menuBar>
     <subMenu id="openpype_menu">
-      <label>OpenPype</label>
+      <labelExpression><![CDATA[
+import os
+return os.environ.get("AVALON_LABEL") or "OpenPype"
+      ]]></labelExpression>
+      <actionItem id="asset_name">
+        <labelExpression><![CDATA[
+from openpype.pipeline import get_current_asset_name, get_current_task_name
+label = "{}, {}".format(get_current_asset_name(), get_current_task_name())
+return label
+        ]]></labelExpression>
+      </actionItem>
+
+      <separatorItem/>

       <scriptItem id="openpype_create">
         <label>Create...</label>
@@ -2041,6 +2041,7 @@ class WorkfileSettings(object):
         )

         workfile_settings = imageio_host["workfile"]
+        viewer_process_settings = imageio_host["viewer"]["viewerProcess"]

         if not config_data:
             # TODO: backward compatibility for old projects - remove later
@@ -2091,6 +2092,15 @@ class WorkfileSettings(object):
         workfile_settings.pop("colorManagement", None)
         workfile_settings.pop("OCIO_config", None)

+        # get monitor lut from settings respecting Nuke version differences
+        monitor_lut = workfile_settings.pop("monitorLut", None)
+        monitor_lut_data = self._get_monitor_settings(
+            viewer_process_settings, monitor_lut)
+
+        # set monitor related knobs luts (MonitorOut, Thumbnails)
+        for knob, value_ in monitor_lut_data.items():
+            workfile_settings[knob] = value_
+
         # then set the rest
         for knob, value_ in workfile_settings.items():
             # skip unfilled ocio config path
@@ -2107,8 +2117,9 @@ class WorkfileSettings(object):

         # set ocio config path
         if config_data:
+            config_path = config_data["path"].replace("\\", "/")
             log.info("OCIO config path found: `{}`".format(
-                config_data["path"]))
+                config_path))

             # check if there's a mismatch between environment and settings
             correct_settings = self._is_settings_matching_environment(
@@ -2118,6 +2129,40 @@ class WorkfileSettings(object):
         if correct_settings:
             self._set_ocio_config_path_to_workfile(config_data)

+    def _get_monitor_settings(self, viewer_lut, monitor_lut):
+        """ Get monitor settings from viewer and monitor lut
+
+        Args:
+            viewer_lut (str): viewer lut string
+            monitor_lut (str): monitor lut string
+
+        Returns:
+            dict: monitor settings
+        """
+        output_data = {}
+        m_display, m_viewer = get_viewer_config_from_string(monitor_lut)
+        v_display, v_viewer = get_viewer_config_from_string(viewer_lut)
+
+        # set monitor lut differently for nuke version 14
+        if nuke.NUKE_VERSION_MAJOR >= 14:
+            output_data["monitorOutLUT"] = create_viewer_profile_string(
+                m_viewer, m_display, path_like=False)
+            # monitorLut=thumbnails - viewerProcess makes more sense
+            output_data["monitorLut"] = create_viewer_profile_string(
+                v_viewer, v_display, path_like=False)
+
+        if nuke.NUKE_VERSION_MAJOR == 13:
+            output_data["monitorOutLUT"] = create_viewer_profile_string(
+                m_viewer, m_display, path_like=False)
+            # monitorLut=thumbnails - viewerProcess makes more sense
+            output_data["monitorLut"] = create_viewer_profile_string(
+                v_viewer, v_display, path_like=True)
+
+        if nuke.NUKE_VERSION_MAJOR <= 12:
+            output_data["monitorLut"] = create_viewer_profile_string(
+                m_viewer, m_display, path_like=True)
+
+        return output_data
+
     def _is_settings_matching_environment(self, config_data):
         """ Check if OCIO config path is different from environment
@@ -2177,6 +2222,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
         """
         # replace path with env var if possible
         ocio_path = self._replace_ocio_path_with_env_var(config_data)
+        ocio_path = ocio_path.replace("\\", "/")

         log.info("Setting OCIO config path to: `{}`".format(
             ocio_path))
@@ -2232,7 +2278,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
         Returns:
             str: OCIO config path with environment variable TCL expression
         """
-        config_path = config_data["path"]
+        config_path = config_data["path"].replace("\\", "/")
         config_template = config_data["template"]

         included_vars = self._get_included_vars(config_template)
@@ -3320,11 +3366,11 @@ def get_viewer_config_from_string(input_string):
         display = split[0]
     elif "(" in viewer:
         pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"
-        result = re.findall(pattern, viewer)
+        result_ = re.findall(pattern, viewer)
         try:
-            result = result.pop()
-            display = str(result[1]).rstrip()
-            viewer = str(result[0]).rstrip()
+            result_ = result_.pop()
+            display = str(result_[1]).rstrip()
+            viewer = str(result_[0]).rstrip()
         except IndexError:
             raise IndexError((
                 "Viewer Input string is not correct. "
@@ -3332,3 +3378,22 @@ def get_viewer_config_from_string(input_string):
         ).format(input_string))

     return (display, viewer)

+
+def create_viewer_profile_string(viewer, display=None, path_like=False):
+    """Convert viewer and display to string
+
+    Args:
+        viewer (str): viewer name
+        display (Optional[str]): display name
+        path_like (Optional[bool]): if True, return path like string
+
+    Returns:
+        str: viewer config string
+    """
+    if not display:
+        return viewer
+
+    if path_like:
+        return "{}/{}".format(display, viewer)
+    return "{} ({})".format(viewer, display)
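Note: a hedged sketch of the two output shapes of the new helper and the round trip through `get_viewer_config_from_string`, derived only from the function bodies shown above; "sRGB" and "ACES" are placeholder names.

```python
create_viewer_profile_string("sRGB", "ACES", path_like=True)   # "ACES/sRGB"
create_viewer_profile_string("sRGB", "ACES", path_like=False)  # "sRGB (ACES)"
create_viewer_profile_string("sRGB")                           # "sRGB"

# The parenthesized form parses back into (display, viewer):
assert get_viewer_config_from_string("sRGB (ACES)") == ("ACES", "sRGB")
```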
@@ -543,6 +543,9 @@ def list_instances(creator_id=None):

     For SubsetManager

+    Args:
+        creator_id (Optional[str]): creator identifier
+
     Returns:
         (list) of dictionaries matching instances format
     """
@@ -575,10 +578,13 @@ def list_instances(creator_id=None):
         if creator_id and instance_data["creator_identifier"] != creator_id:
             continue

-        if instance_data["instance_id"] in instance_ids:
+        instance_id = instance_data.get("instance_id")
+        if not instance_id:
+            pass
+        elif instance_id in instance_ids:
             instance_data.pop("instance_id")
         else:
-            instance_ids.add(instance_data["instance_id"])
+            instance_ids.add(instance_id)

         # node name could change, so update subset name data
         _update_subset_name_data(instance_data, node)
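Note: a hedged, standalone sketch of the dedupe rule above: the first occurrence of an id wins, later duplicates have their id popped (so a fresh one can be assigned downstream), and a missing id is tolerated. The helper name and sample data are illustrative only.

```python
def dedupe_instance_ids(instances):
    # Keep first occurrence of each id; strip it from later duplicates.
    seen = set()
    for instance_data in instances:
        instance_id = instance_data.get("instance_id")
        if not instance_id:
            continue
        if instance_id in seen:
            instance_data.pop("instance_id")
        else:
            seen.add(instance_id)

data = [{"instance_id": "a"}, {"instance_id": "a"}, {}]
dedupe_instance_ids(data)
# data is now [{"instance_id": "a"}, {}, {}]
```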
@@ -14,27 +14,26 @@ class RepairActionBase(pyblish.api.Action):
         # Get the errored instances
         return get_errored_instances_from_context(context, plugin=plugin)

-    def repair_knob(self, instances, state):
+    def repair_knob(self, context, instances, state):
+        create_context = context.data["create_context"]
         for instance in instances:
-            node = instance.data["transientData"]["node"]
-            files_remove = [os.path.join(instance.data["outputDir"], f)
-                            for r in instance.data.get("representations", [])
-                            for f in r.get("files", [])
-                            ]
-            self.log.info("Files to be removed: {}".format(files_remove))
-            for f in files_remove:
-                os.remove(f)
-                self.log.debug("removing file: {}".format(f))
-            node["render"].setValue(state)
+            # Reset the render knob
+            instance_id = instance.data.get("instance_id")
+            created_instance = create_context.get_instance_by_id(
+                instance_id
+            )
+            created_instance.creator_attributes["render_target"] = state
             self.log.info("Rendering toggled to `{}`".format(state))

+        create_context.save_changes()


 class RepairCollectionActionToLocal(RepairActionBase):
     label = "Repair - rerender with \"Local\""

     def process(self, context, plugin):
         instances = self.get_instance(context, plugin)
-        self.repair_knob(instances, "Local")
+        self.repair_knob(context, instances, "local")


 class RepairCollectionActionToFarm(RepairActionBase):
@@ -42,7 +41,7 @@ class RepairCollectionActionToFarm(RepairActionBase):

     def process(self, context, plugin):
         instances = self.get_instance(context, plugin)
-        self.repair_knob(instances, "On farm")
+        self.repair_knob(context, instances, "farm")


 class ValidateRenderedFrames(pyblish.api.InstancePlugin):
@@ -1,3 +1,5 @@
+from collections import defaultdict
+
 import pyblish.api
 from openpype.pipeline.publish import get_errored_instances_from_context
 from openpype.hosts.nuke.api.lib import (
@@ -87,6 +89,11 @@ class ValidateNukeWriteNode(
                 correct_data
             ))

+        # Collect key values of same type in a list.
+        values_by_name = defaultdict(list)
+        for knob_data in correct_data["knobs"]:
+            values_by_name[knob_data["name"]].append(knob_data["value"])
+
         for knob_data in correct_data["knobs"]:
             knob_type = knob_data["type"]
             self.log.debug("__ knob_type: {}".format(
@@ -105,28 +112,33 @@ class ValidateNukeWriteNode(
             )

             key = knob_data["name"]
-            value = knob_data["value"]
+            values = values_by_name[key]
             node_value = write_node[key].value()

             # fix type differences
-            if type(node_value) in (int, float):
-                try:
-                    if isinstance(value, list):
-                        value = color_gui_to_int(value)
-                    else:
-                        value = float(value)
-                        node_value = float(node_value)
-                except ValueError:
-                    value = str(value)
-            else:
-                value = str(value)
-                node_value = str(node_value)
+            fixed_values = []
+            for value in values:
+                if type(node_value) in (int, float):
+                    try:
+                        if isinstance(value, list):
+                            value = color_gui_to_int(value)
+                        else:
+                            value = float(value)
+                            node_value = float(node_value)
+                    except ValueError:
+                        value = str(value)
+                else:
+                    value = str(value)
+                    node_value = str(node_value)
+
+                fixed_values.append(value)

-            self.log.debug("__ key: {} | value: {}".format(
-                key, value
+            self.log.debug("__ key: {} | values: {}".format(
+                key, fixed_values
             ))
             if (
-                node_value != value
+                node_value not in fixed_values
                 and key != "file"
                 and key != "tile_color"
             ):
@@ -1,4 +1,6 @@
 import clique
+import os
+import re

 import pyblish.api
@@ -21,7 +23,19 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
         representations = instance.data.get("representations")
         for repr in representations:
             data = instance.data.get("assetEntity", {}).get("data", {})
-            patterns = [clique.PATTERNS["frames"]]
+            repr_files = repr["files"]
+            if isinstance(repr_files, str):
+                continue
+
+            ext = repr.get("ext")
+            if not ext:
+                _, ext = os.path.splitext(repr_files[0])
+            elif not ext.startswith("."):
+                ext = ".{}".format(ext)
+            pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(
+                re.escape(ext))
+            patterns = [pattern]

             collections, remainder = clique.assemble(
                 repr["files"], minimum_items=1, patterns=patterns)
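Note: a hedged demo of the extension-anchored frame pattern built above, using hypothetical file names.

```python
import re

ext = ".exr"
pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(re.escape(ext))

m = re.search(pattern, "render.0001.exr")
m.group("index")    # "0001"
m.group("padding")  # "000"

# A name with a different extension no longer matches, which is the point
# of anchoring the pattern on the representation's own extension:
re.search(pattern, "render.0001.jpg")  # None
```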
@@ -30,6 +44,10 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
             collection = collections[0]
             frames = list(collection.indexes)

+            if instance.data.get("slate"):
+                # Slate is not part of the frame range
+                frames = frames[1:]
+
             current_range = (frames[0], frames[-1])
             required_range = (data["clipIn"],
                               data["clipOut"])
@@ -280,13 +280,14 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):

         for key, value in add_args.items():
             # Skip key values where value is None
-            if value is not None:
-                args.append("--{}".format(key))
-                # Extend list into arguments (targets can be a list)
-                if isinstance(value, (tuple, list)):
-                    args.extend(value)
-                else:
-                    args.append(value)
+            if value is None:
+                continue
+            arg_key = "--{}".format(key)
+            if not isinstance(value, (tuple, list)):
+                value = [value]
+
+            for item in value:
+                args += [arg_key, item]

         log.info("args:: {}".format(args))
         if add_to_queue:
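Note: a hedged illustration of the behavioral change above, with made-up values. The old loop emitted the flag once followed by all values; the new loop repeats the flag per value.

```python
add_args = {"targets": ["deadline", "farm"], "user": "artist"}

# Old:  ["--targets", "deadline", "farm", "--user", "artist"]
# New:  ["--targets", "deadline", "--targets", "farm", "--user", "artist"]
args = []
for key, value in add_args.items():
    if value is None:
        continue
    arg_key = "--{}".format(key)
    if not isinstance(value, (tuple, list)):
        value = [value]
    for item in value:
        args += [arg_key, item]
```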
@@ -3,6 +3,7 @@ import os
 import re
 import copy
 import inspect
+import collections
 import logging
 import weakref
 from uuid import uuid4
@@ -340,8 +341,8 @@ class EventSystem(object):
         event.emit()
         return event

-    def emit_event(self, event):
-        """Emit event object.
+    def _process_event(self, event):
+        """Process event topic and trigger callbacks.

         Args:
             event (Event): Prepared event with topic and data.
@@ -356,6 +357,91 @@ class EventSystem(object):
         for callback in invalid_callbacks:
             self._registered_callbacks.remove(callback)

+    def emit_event(self, event):
+        """Emit event object.
+
+        Args:
+            event (Event): Prepared event with topic and data.
+        """
+
+        self._process_event(event)
+
+
+class QueuedEventSystem(EventSystem):
+    """Events are automatically processed in queue.
+
+    If callback triggers another event, the event is not processed until
+    all callbacks of previous event are processed.
+
+    Allows to implement custom event process loop by changing 'auto_execute'.
+
+    Note:
+        This probably should be default behavior of 'EventSystem'. Changing it
+        now could cause problems in existing code.
+
+    Args:
+        auto_execute (Optional[bool]): If 'True', events are processed
+            automatically. Custom loop calling 'process_next_event'
+            must be implemented when set to 'False'.
+    """
+
+    def __init__(self, auto_execute=True):
+        super(QueuedEventSystem, self).__init__()
+        self._event_queue = collections.deque()
+        self._current_event = None
+        self._auto_execute = auto_execute
+
+    def __len__(self):
+        return self.count()
+
+    def count(self):
+        """Get number of events in queue.
+
+        Returns:
+            int: Number of events in queue.
+        """
+
+        return len(self._event_queue)
+
+    def process_next_event(self):
+        """Process next event in queue.
+
+        Should be used only if 'auto_execute' is set to 'False'. Only single
+        event is processed.
+
+        Returns:
+            Union[Event, None]: Processed event.
+        """
+
+        if self._current_event is not None:
+            raise ValueError("An event is already in progress.")
+
+        if not self._event_queue:
+            return None
+        event = self._event_queue.popleft()
+        self._current_event = event
+        self._process_event(event)
+        self._current_event = None
+        return event
+
+    def emit_event(self, event):
+        """Emit event object.
+
+        Args:
+            event (Event): Prepared event with topic and data.
+        """
+
+        if not self._auto_execute or self._current_event is not None:
+            self._event_queue.append(event)
+            return
+
+        self._event_queue.append(event)
+        while self._event_queue:
+            event = self._event_queue.popleft()
+            self._current_event = event
+            self._process_event(event)
+            self._current_event = None
+
+
 class GlobalEventSystem:
     """Event system living in global scope of process.
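Note: a hedged usage sketch of the new class. The `Event` construction API is not shown in this diff, so `some_prepared_event` stands in for an event prepared elsewhere (the context line `event.emit()` above suggests the system has its own creation helpers).

```python
event_system = QueuedEventSystem(auto_execute=False)
event_system.emit_event(some_prepared_event)  # only queued, not processed yet

# Custom processing loop, one event at a time:
while event_system.count():
    processed = event_system.process_next_event()
    print("processed:", processed)
```

With the default `auto_execute=True`, events emitted from inside a callback are appended to the queue and run only after all callbacks of the current event finish, which avoids re-entrant callback execution.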
@@ -373,10 +373,12 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
     addons_info = _get_ayon_addons_information()
     if not addons_info:
         return v3_addons_to_skip
-    addons_dir = os.path.join(
-        appdirs.user_data_dir("AYON", "Ynput"),
-        "addons"
-    )
+    addons_dir = os.environ.get("AYON_ADDONS_DIR")
+    if not addons_dir:
+        addons_dir = os.path.join(
+            appdirs.user_data_dir("AYON", "Ynput"),
+            "addons"
+        )
     if not os.path.exists(addons_dir):
         log.warning("Addons directory does not exists. Path \"{}\"".format(
             addons_dir
@@ -8,6 +8,7 @@ attribute or using default server if that attribute doesn't exists.
 from maya import cmds

 import pyblish.api
+from openpype.pipeline.publish import KnownPublishError


 class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
@@ -81,13 +82,14 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
             if k in default_servers
         }

-        msg = (
-            "\"{}\" server on instance is not enabled in project settings."
-            " Enabled project servers:\n{}".format(
-                instance_server, project_enabled_servers
-            )
-        )
-        assert instance_server in project_enabled_servers, msg
+        if instance_server not in project_enabled_servers:
+            msg = (
+                "\"{}\" server on instance is not enabled in project settings."
+                " Enabled project servers:\n{}".format(
+                    instance_server, project_enabled_servers
+                )
+            )
+            raise KnownPublishError(msg)

         self.log.debug("Using project approved server.")
         return project_enabled_servers[instance_server]
@@ -1,31 +1,31 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <root>
 <error id="main">
-    <title>Scene setting</title>
+    <title>Deadline Pools</title>
     <description>
     ## Invalid Deadline pools found

-    Configured pools don't match what is set in Deadline.
+    Configured pools don't match available pools in Deadline.

     {invalid_value_str}

     ### How to repair?

     If your instance had deadline pools set on creation, remove or
     change them.
     In other cases inform admin to change them in Settings.

-    Available deadline pools {pools_str}.
+    Available deadline pools:
+    {pools_str}
     </description>
     <detail>
     ### __Detailed Info__

-    This error is shown when deadline pool is not on Deadline anymore. It
-    could happen in case of republish old workfile which was created with
-    previous deadline pools,
-    or someone changed pools on Deadline side, but didn't modify Openpype
-    Settings.
+    This error is shown when a configured pool is not available on Deadline. It
+    can happen when publishing old workfiles which were created with previous
+    deadline pools, or someone changed the available pools in Deadline,
+    but didn't modify Openpype Settings to match the changes.
     </detail>
 </error>
 </root>
@@ -12,7 +12,9 @@ from openpype.pipeline import (
     legacy_io,
     OpenPypePyblishPluginMixin
 )
-from openpype.settings import get_project_settings
+from openpype.pipeline.publish.lib import (
+    replace_with_published_scene_path
+)
 from openpype.hosts.max.api.lib import (
     get_current_renderer,
     get_multipass_setting
@@ -246,7 +248,8 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
         if instance.data["renderer"] == "Redshift_Renderer":
             self.log.debug("Using Redshift...published scene wont be used..")
             replace_in_path = False
-        return replace_in_path
+        return replace_with_published_scene_path(
+            instance, replace_in_path)

     @staticmethod
     def _iter_expected_files(exp):
@@ -1,8 +1,7 @@
-import os
-import requests
-
 import pyblish.api

+from openpype_modules.deadline.abstract_submit_deadline import requests_get
+

 class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
     """Validate Deadline Web Service is running"""
@@ -12,34 +11,25 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
     hosts = ["maya", "nuke"]
     families = ["renderlayer", "render"]

+    # cache
+    responses = {}
+
     def process(self, instance):
         # get default deadline webservice url from deadline module
         deadline_url = instance.context.data["defaultDeadline"]
         # if custom one is set in instance, use that
         if instance.data.get("deadlineUrl"):
             deadline_url = instance.data.get("deadlineUrl")
-            self.log.info(
-                "We have deadline URL on instance {}".format(
-                    deadline_url))
+            self.log.debug(
+                "We have deadline URL on instance {}".format(deadline_url)
+            )
         assert deadline_url, "Requires Deadline Webservice URL"

         # Check response
-        response = self._requests_get(deadline_url)
+        if deadline_url not in self.responses:
+            self.responses[deadline_url] = requests_get(deadline_url)
+
+        response = self.responses[deadline_url]
         assert response.ok, "Response must be ok"
         assert response.text.startswith("Deadline Web Service "), (
             "Web service did not respond with 'Deadline Web Service'"
         )
-
-    def _requests_get(self, *args, **kwargs):
-        """ Wrapper for requests, disabling SSL certificate validation if
-        DONT_VERIFY_SSL environment variable is found. This is useful when
-        Deadline or Muster server are running with self-signed certificates
-        and their certificate is not added to trusted certificates on
-        client machines.
-
-        WARNING: disabling SSL certificate validation is defeating one line
-        of defense SSL is providing and it is not recommended.
-        """
-        if 'verify' not in kwargs:
-            kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True  # noqa
-        return requests.get(*args, **kwargs)
@@ -25,33 +25,58 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
                 "maxrender"]
     optional = True

+    # cache
+    pools_per_url = {}
+
     def process(self, instance):
         if not self.is_active(instance.data):
             return

         if not instance.data.get("farm"):
             self.log.debug("Skipping local instance.")
             return

-        # get default deadline webservice url from deadline module
-        deadline_url = instance.context.data["defaultDeadline"]
-        self.log.info("deadline_url::{}".format(deadline_url))
-        pools = DeadlineModule.get_deadline_pools(deadline_url, log=self.log)
-        self.log.info("pools::{}".format(pools))
-
-        formatting_data = {
-            "pools_str": ",".join(pools)
-        }
+        deadline_url = self.get_deadline_url(instance)
+        pools = self.get_pools(deadline_url)
+
+        invalid_pools = {}
         primary_pool = instance.data.get("primaryPool")
         if primary_pool and primary_pool not in pools:
-            msg = "Configured primary '{}' not present on Deadline".format(
-                instance.data["primaryPool"])
-            formatting_data["invalid_value_str"] = msg
-            raise PublishXmlValidationError(self, msg,
-                                            formatting_data=formatting_data)
+            invalid_pools["primary"] = primary_pool

         secondary_pool = instance.data.get("secondaryPool")
         if secondary_pool and secondary_pool not in pools:
-            msg = "Configured secondary '{}' not present on Deadline".format(
-                instance.data["secondaryPool"])
-            formatting_data["invalid_value_str"] = msg
-            raise PublishXmlValidationError(self, msg,
-                                            formatting_data=formatting_data)
+            invalid_pools["secondary"] = secondary_pool
+
+        if invalid_pools:
+            message = "\n".join(
+                "{} pool '{}' not available on Deadline".format(key.title(),
+                                                                pool)
+                for key, pool in invalid_pools.items()
+            )
+            raise PublishXmlValidationError(
+                plugin=self,
+                message=message,
+                formatting_data={"pools_str": ", ".join(pools)}
+            )
+
+    def get_deadline_url(self, instance):
+        # get default deadline webservice url from deadline module
+        deadline_url = instance.context.data["defaultDeadline"]
+        if instance.data.get("deadlineUrl"):
+            # if custom one is set in instance, use that
+            deadline_url = instance.data.get("deadlineUrl")
+        return deadline_url
+
+    def get_pools(self, deadline_url):
+        if deadline_url not in self.pools_per_url:
+            self.log.debug(
+                "Querying available pools for Deadline url: {}".format(
+                    deadline_url)
+            )
+            pools = DeadlineModule.get_deadline_pools(deadline_url,
+                                                      log=self.log)
+            self.log.info("Available pools: {}".format(pools))
+            self.pools_per_url[deadline_url] = pools
+
+        return self.pools_per_url[deadline_url]
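Note: `pools_per_url` (like `responses` in the connection validator above) is a class attribute, so the Deadline query runs once per webservice URL for the whole publish session instead of once per instance. A hedged, generic sketch of the idiom:

```python
class CachedPluginBase:
    _cache = {}  # class attribute: shared across all plugin instances

    def get_cached(self, key, compute):
        # Compute each value once per process, then reuse it.
        if key not in self._cache:
            self._cache[key] = compute(key)
        return self._cache[key]
```

The trade-off, as with any class-level cache, is that pool changes on the Deadline side are not picked up until the process restarts.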
@@ -91,7 +91,13 @@ class AyonDeadlinePlugin(DeadlinePlugin):
         # clean '\ ' for MacOS pasting
         if platform.system().lower() == "darwin":
             exe_list = exe_list.replace("\\ ", " ")
-        exe = FileUtils.SearchFileList(exe_list)
+
+        expanded_paths = []
+        for path in exe_list.split(";"):
+            if path.startswith("~"):
+                path = os.path.expanduser(path)
+            expanded_paths.append(path)
+        exe = FileUtils.SearchFileList(";".join(expanded_paths))

         if exe == "":
             self.FailRender(
@@ -547,7 +547,14 @@ def get_ayon_executable():
     # clean '\ ' for MacOS pasting
     if platform.system().lower() == "darwin":
         exe_list = exe_list.replace("\\ ", " ")
-    return exe_list
+
+    # Expand user paths
+    expanded_paths = []
+    for path in exe_list.split(";"):
+        if path.startswith("~"):
+            path = os.path.expanduser(path)
+        expanded_paths.append(path)
+    return ";".join(expanded_paths)


 def inject_render_job_id(deadlinePlugin):
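Note: a hedged demo of the tilde expansion added in the two hunks above, with a made-up path list; the expanded result depends on the current user's home directory.

```python
import os

exe_list = "~/ayon/ayon;/usr/local/bin/ayon"
expanded = ";".join(
    os.path.expanduser(p) if p.startswith("~") else p
    for p in exe_list.split(";")
)
# e.g. "/home/artist/ayon/ayon;/usr/local/bin/ayon"
```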
@@ -139,7 +139,7 @@ def get_transferable_representations(instance):
     to_transfer = []

     for representation in instance.data.get("representations", []):
-        if "publish_on_farm" not in representation.get("tags"):
+        if "publish_on_farm" not in representation.get("tags", []):
             continue

         trans_rep = representation.copy()
@@ -265,8 +265,7 @@ def create_skeleton_instance(
         instance_skeleton_data[v] = instance.data.get(v)

     representations = get_transferable_representations(instance)
-    instance_skeleton_data["representations"] = []
-    instance_skeleton_data["representations"] += representations
+    instance_skeleton_data["representations"] = representations

     persistent = instance.data.get("stagingDir_persistent") is True
     instance_skeleton_data["stagingDir_persistent"] = persistent
@@ -464,9 +464,8 @@ def apply_plugin_settings_automatically(plugin, settings, logger=None):

     for option, value in settings.items():
         if logger:
-            logger.debug("Plugin {} - Attr: {} -> {}".format(
-                option, value, plugin.__name__
-            ))
+            logger.debug("Plugin %s - Attr: %s -> %s",
+                         plugin.__name__, option, value)
         setattr(plugin, option, value)
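Note: the change above switches from eager `str.format()` to logging's lazy %-style arguments (and fixes the argument order, which previously printed the plugin name in the value slot). A hedged minimal comparison:

```python
import logging

logger = logging.getLogger(__name__)

# Eager: the string is built even when DEBUG records are discarded.
logger.debug("Plugin {} - Attr: {} -> {}".format("MyPlugin", "opt", 1))

# Lazy: formatting happens only if the record is actually emitted.
logger.debug("Plugin %s - Attr: %s -> %s", "MyPlugin", "opt", 1)
```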
@@ -8,6 +8,11 @@ from ayon_api import slugify_string
 from ayon_api.entity_hub import EntityHub

 from openpype import AYON_SERVER_ENABLED
+from openpype.client import get_assets
+from openpype.pipeline.template_data import (
+    get_asset_template_data,
+    get_task_template_data,
+)


 def _default_json_parse(value):
@@ -27,13 +32,51 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):

         hierarchy_context = context.data.get("hierarchyContext")
         if not hierarchy_context:
-            self.log.info("Skipping")
+            self.log.debug("Skipping")
             return

         project_name = context.data["projectName"]
         self._create_hierarchy(context, project_name)
+        self._fill_instance_entities(context, project_name)
+
+    def _fill_instance_entities(self, context, project_name):
+        instances_by_asset_name = collections.defaultdict(list)
+        for instance in context:
+            if instance.data.get("publish") is False:
+                continue
+
+            instance_entity = instance.data.get("assetEntity")
+            if instance_entity:
+                continue
+
+            # Skip if instance asset does not match
+            instance_asset_name = instance.data.get("asset")
+            instances_by_asset_name[instance_asset_name].append(instance)
+
+        project_doc = context.data["projectEntity"]
+        asset_docs = get_assets(
+            project_name, asset_names=instances_by_asset_name.keys()
+        )
+        asset_docs_by_name = {
+            asset_doc["name"]: asset_doc
+            for asset_doc in asset_docs
+        }
+        for asset_name, instances in instances_by_asset_name.items():
+            asset_doc = asset_docs_by_name[asset_name]
+            asset_data = get_asset_template_data(asset_doc, project_name)
+            for instance in instances:
+                task_name = instance.data.get("task")
+                template_data = get_task_template_data(
+                    project_doc, asset_doc, task_name)
+                template_data.update(copy.deepcopy(asset_data))
+
+                instance.data["anatomyData"].update(template_data)
+                instance.data["assetEntity"] = asset_doc

     def _create_hierarchy(self, context, project_name):
         hierarchy_context = self._filter_hierarchy(context)
         if not hierarchy_context:
-            self.log.info("All folders were filtered out")
+            self.log.debug("All folders were filtered out")
             return

         self.log.debug("Hierarchy_context: {}".format(
@@ -128,7 +128,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         if thumbnail_created:
             return full_output_path

-        self.log.warning("Thumbanil has not been created.")
+        self.log.warning("Thumbnail has not been created.")

     def _instance_has_thumbnail(self, instance):
         if "representations" not in instance.data:
@@ -147,6 +147,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         oiio_cmd = get_oiio_tool_args(
             "oiiotool",
             "-a", src_path,
+            "--ch", "R,G,B",
             "-o", dst_path
         )
         self.log.info("Running: {}".format(" ".join(oiio_cmd)))
@@ -7,12 +7,12 @@ from openpype.pipeline.publish import (


 class ValidatePublishDir(pyblish.api.InstancePlugin):
-    """Validates if 'publishDir' is a project directory
+    """Validates if files are being published into a project directory

-    'publishDir' is collected based on publish templates. In specific cases
-    ('source' template) source folder of items is used as a 'publishDir', this
-    validates if it is inside any project dir for the project.
-    (eg. files are not published from local folder, unaccessible for studio'
+    In specific cases ('source' template - in place publishing) source folder
+    of published items is used as a regular `publish` dir.
+    This validates if it is inside any project dir for the project.
+    (eg. files are not published from local folder, inaccessible for studio')

     """
@@ -44,6 +44,8 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):

         anatomy = instance.context.data["anatomy"]

+        # original_dirname must be convertable to rootless path
+        # in other case it is path inside of root folder for the project
         success, _ = anatomy.find_root_template_from_path(original_dirname)

         formatting_data = {
@@ -56,11 +58,12 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
                 formatting_data=formatting_data)

     def _get_template_name_from_instance(self, instance):
+        """Find template which will be used during integration."""
         project_name = instance.context.data["projectName"]
         host_name = instance.context.data["hostName"]
         anatomy_data = instance.data["anatomyData"]
         family = anatomy_data["family"]
-        family = self.family_mapping.get("family") or family
+        family = self.family_mapping.get(family) or family
         task_info = anatomy_data.get("task") or {}

         return get_publish_template_name(
@@ -25,16 +25,16 @@ class ValidateVersion(pyblish.api.InstancePlugin):
         # TODO: Remove full non-html version upon drop of old publisher
         msg = (
             "Version '{0}' from instance '{1}' that you are "
-            " trying to publish is lower or equal to an existing version "
-            " in the database. Version in database: '{2}'."
+            "trying to publish is lower or equal to an existing version "
+            "in the database. Version in database: '{2}'."
             "Please version up your workfile to a higher version number "
             "than: '{2}'."
         ).format(version, instance.data["name"], latest_version)

         msg_html = (
             "Version <b>{0}</b> from instance <b>{1}</b> that you are "
-            " trying to publish is lower or equal to an existing version "
-            " in the database. Version in database: <b>{2}</b>.<br><br>"
+            "trying to publish is lower or equal to an existing version "
+            "in the database. Version in database: <b>{2}</b>.<br><br>"
             "Please version up your workfile to a higher version number "
             "than: <b>{2}</b>."
         ).format(version, instance.data["name"], latest_version)
@@ -616,6 +616,23 @@ def _convert_maya_project_settings(ayon_settings, output):
     output["maya"] = ayon_maya


+def _convert_3dsmax_project_settings(ayon_settings, output):
+    if "max" not in ayon_settings:
+        return
+
+    ayon_max = ayon_settings["max"]
+    _convert_host_imageio(ayon_max)
+    if "PointCloud" in ayon_max:
+        point_cloud_attribute = ayon_max["PointCloud"]["attribute"]
+        new_point_cloud_attribute = {
+            item["name"]: item["value"]
+            for item in point_cloud_attribute
+        }
+        ayon_max["PointCloud"]["attribute"] = new_point_cloud_attribute
+
+    output["max"] = ayon_max
+
+
 def _convert_nuke_knobs(knobs):
     new_knobs = []
     for knob in knobs:
@@ -737,6 +754,17 @@ def _convert_nuke_project_settings(ayon_settings, output):
         item_filter["subsets"] = item_filter.pop("product_names")
         item_filter["families"] = item_filter.pop("product_types")

+        reformat_nodes_config = item.get("reformat_nodes_config") or {}
+        reposition_nodes = reformat_nodes_config.get(
+            "reposition_nodes") or []
+
+        for reposition_node in reposition_nodes:
+            if "knobs" not in reposition_node:
+                continue
+            reposition_node["knobs"] = _convert_nuke_knobs(
+                reposition_node["knobs"]
+            )
+
         name = item.pop("name")
         new_review_data_outputs[name] = item
     ayon_publish["ExtractReviewDataMov"]["outputs"] = new_review_data_outputs
@@ -1261,6 +1289,7 @@ def convert_project_settings(ayon_settings, default_settings):
     _convert_flame_project_settings(ayon_settings, output)
     _convert_fusion_project_settings(ayon_settings, output)
     _convert_maya_project_settings(ayon_settings, output)
+    _convert_3dsmax_project_settings(ayon_settings, output)
     _convert_nuke_project_settings(ayon_settings, output)
     _convert_hiero_project_settings(ayon_settings, output)
     _convert_photoshop_project_settings(ayon_settings, output)
@@ -256,6 +256,23 @@
             "allow_multiple_items": true,
             "allow_version_control": false,
             "extensions": []
-        }
+        },
+        {
+            "family": "audio",
+            "identifier": "",
+            "label": "Audio ",
+            "icon": "fa5s.file-audio",
+            "default_variants": [
+                "Main"
+            ],
+            "description": "Audio product",
+            "detailed_description": "Audio files for review or final delivery",
+            "allow_sequences": false,
+            "allow_multiple_items": false,
+            "allow_version_control": false,
+            "extensions": [
+                ".wav"
+            ]
+        }
     ],
     "editorial_creators": {
@@ -343,6 +343,7 @@ class TextAttrWidget(_BaseAttrDefWidget):
         return self._input_widget.text()

     def set_value(self, value, multivalue=False):
+        block_signals = False
         if multivalue:
             set_value = set(value)
             if None in set_value:
@@ -352,13 +353,18 @@ class TextAttrWidget(_BaseAttrDefWidget):
             if len(set_value) == 1:
                 value = tuple(set_value)[0]
             else:
+                block_signals = True
                 value = "< Multiselection >"

         if value != self.current_value():
+            if block_signals:
+                self._input_widget.blockSignals(True)
             if self.multiline:
                 self._input_widget.setPlainText(value)
             else:
                 self._input_widget.setText(value)
+            if block_signals:
+                self._input_widget.blockSignals(False)


 class BoolAttrWidget(_BaseAttrDefWidget):
@@ -391,7 +397,9 @@ class BoolAttrWidget(_BaseAttrDefWidget):
             set_value.add(self.attr_def.default)

         if len(set_value) > 1:
+            self._input_widget.blockSignals(True)
             self._input_widget.setCheckState(QtCore.Qt.PartiallyChecked)
+            self._input_widget.blockSignals(False)
             return
         value = tuple(set_value)[0]
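Note: the paired `blockSignals(True)`/`blockSignals(False)` calls added in the two widget hunks above could also be expressed with a small context manager (or Qt's own `QtCore.QSignalBlocker`). This helper is a hedged illustration, not part of the commit.

```python
import contextlib

@contextlib.contextmanager
def blocked_signals(widget):
    # Suppress the widget's signals for the duration of the block.
    widget.blockSignals(True)
    try:
        yield widget
    finally:
        widget.blockSignals(False)

# Hypothetical usage inside BoolAttrWidget.set_value:
# with blocked_signals(self._input_widget):
#     self._input_widget.setCheckState(QtCore.Qt.PartiallyChecked)
```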
@@ -168,7 +168,7 @@ class OverviewWidget(QtWidgets.QFrame):
     def make_sure_animation_is_finished(self):
         if self._change_anim.state() == QtCore.QAbstractAnimation.Running:
             self._change_anim.stop()
-            self._on_change_anim_finished()
+        self._on_change_anim_finished()

     def set_state(self, new_state, animate):
         if new_state == self._current_state:
@@ -75,7 +75,7 @@ class TasksModel(QtGui.QStandardItemModel):

     def set_asset_id(self, asset_id):
         asset_doc = None
-        if self._context_is_valid():
+        if asset_id and self._context_is_valid():
             project_name = self._get_current_project()
             asset_doc = get_asset_by_id(
                 project_name, asset_id, fields=["data.tasks"]
@@ -48,6 +48,11 @@ from ._api import (
     patch,
     delete,

+    get_timeout,
+    set_timeout,
+    get_max_retries,
+    set_max_retries,
+
     get_event,
     get_events,
     dispatch_event,
@@ -245,6 +250,11 @@ __all__ = (
     "patch",
     "delete",

+    "get_timeout",
+    "set_timeout",
+    "get_max_retries",
+    "set_max_retries",
+
     "get_event",
     "get_events",
     "dispatch_event",
openpype/vendor/python/common/ayon_api/_api.py (vendored, 20 changes)
@@ -474,6 +474,26 @@ def delete(*args, **kwargs):
     return con.delete(*args, **kwargs)


+def get_timeout(*args, **kwargs):
+    con = get_server_api_connection()
+    return con.get_timeout(*args, **kwargs)
+
+
+def set_timeout(*args, **kwargs):
+    con = get_server_api_connection()
+    return con.set_timeout(*args, **kwargs)
+
+
+def get_max_retries(*args, **kwargs):
+    con = get_server_api_connection()
+    return con.get_max_retries(*args, **kwargs)
+
+
+def set_max_retries(*args, **kwargs):
+    con = get_server_api_connection()
+    return con.set_max_retries(*args, **kwargs)
+
+
 def get_event(*args, **kwargs):
     con = get_server_api_connection()
     return con.get_event(*args, **kwargs)
@@ -1,18 +1,21 @@
 # Environments where server url and api key are stored for global connection
 SERVER_URL_ENV_KEY = "AYON_SERVER_URL"
 SERVER_API_ENV_KEY = "AYON_API_KEY"
+SERVER_TIMEOUT_ENV_KEY = "AYON_SERVER_TIMEOUT"
+SERVER_RETRIES_ENV_KEY = "AYON_SERVER_RETRIES"

 # Backwards compatibility
 SERVER_TOKEN_ENV_KEY = SERVER_API_ENV_KEY

 # --- User ---
 DEFAULT_USER_FIELDS = {
-    "roles",
+    "accessGroups",
+    "defaultAccessGroups",
     "name",
     "isService",
     "isManager",
     "isGuest",
     "isAdmin",
-    "defaultRoles",
     "createdAt",
     "active",
     "hasPassword",
@@ -247,9 +247,11 @@ def products_graphql_query(fields):
     query = GraphQlQuery("ProductsQuery")

     project_name_var = query.add_variable("projectName", "String!")
-    folder_ids_var = query.add_variable("folderIds", "[String!]")
     product_ids_var = query.add_variable("productIds", "[String!]")
     product_names_var = query.add_variable("productNames", "[String!]")
+    folder_ids_var = query.add_variable("folderIds", "[String!]")
+    product_types_var = query.add_variable("productTypes", "[String!]")
+    statuses_var = query.add_variable("statuses", "[String!]")

     project_field = query.add_field("project")
     project_field.set_filter("name", project_name_var)
|
@ -258,6 +260,8 @@ def products_graphql_query(fields):
|
|||
products_field.set_filter("ids", product_ids_var)
|
||||
products_field.set_filter("names", product_names_var)
|
||||
products_field.set_filter("folderIds", folder_ids_var)
|
||||
products_field.set_filter("productTypes", product_types_var)
|
||||
products_field.set_filter("statuses", statuses_var)
|
||||
|
||||
nested_fields = fields_to_dict(set(fields))
|
||||
add_links_fields(products_field, nested_fields)
|
||||
|
|
|
|||
openpype/vendor/python/common/ayon_api/server_api.py (vendored, 236 changes)
@@ -2,6 +2,7 @@ import os
 import re
 import io
 import json
+import time
 import logging
 import collections
 import platform
@@ -26,6 +27,8 @@ except ImportError:
     from json import JSONDecodeError as RequestsJSONDecodeError

 from .constants import (
+    SERVER_TIMEOUT_ENV_KEY,
+    SERVER_RETRIES_ENV_KEY,
     DEFAULT_PRODUCT_TYPE_FIELDS,
     DEFAULT_PROJECT_FIELDS,
     DEFAULT_FOLDER_FIELDS,
@@ -127,6 +130,8 @@ class RestApiResponse(object):

     @property
     def text(self):
+        if self._response is None:
+            return self.detail
         return self._response.text

     @property

@@ -135,6 +140,8 @@ class RestApiResponse(object):

     @property
     def headers(self):
+        if self._response is None:
+            return {}
         return self._response.headers

     @property

@@ -148,6 +155,8 @@ class RestApiResponse(object):

     @property
     def content(self):
+        if self._response is None:
+            return b""
         return self._response.content

     @property
@@ -339,7 +348,11 @@ class ServerAPI(object):
             variable value 'AYON_CERT_FILE' by default.
         create_session (Optional[bool]): Create session for connection if
             token is available. Default is True.
+        timeout (Optional[float]): Timeout for requests.
+        max_retries (Optional[int]): Number of retries for requests.
     """
+    _default_timeout = 10.0
+    _default_max_retries = 3

     def __init__(
         self,
@@ -352,6 +365,8 @@ class ServerAPI(object):
         ssl_verify=None,
         cert=None,
         create_session=True,
+        timeout=None,
+        max_retries=None,
     ):
         if not base_url:
             raise ValueError("Invalid server URL {}".format(str(base_url)))
@@ -370,6 +385,13 @@ class ServerAPI(object):
         )
         self._sender = sender

+        self._timeout = None
+        self._max_retries = None
+
+        # Set timeout and max retries based on passed values
+        self.set_timeout(timeout)
+        self.set_max_retries(max_retries)
+
         if ssl_verify is None:
             # Custom AYON env variable for CA file or 'True'
             # - that should cover most default behaviors in 'requests'
@@ -474,6 +496,87 @@ class ServerAPI(object):
     ssl_verify = property(get_ssl_verify, set_ssl_verify)
     cert = property(get_cert, set_cert)

+    @classmethod
+    def get_default_timeout(cls):
+        """Default value for requests timeout.
+
+        First looks for environment variable SERVER_TIMEOUT_ENV_KEY which
+        can affect timeout value. If not available then use class
+        attribute '_default_timeout'.
+
+        Returns:
+            float: Timeout value in seconds.
+        """
+
+        try:
+            return float(os.environ.get(SERVER_TIMEOUT_ENV_KEY))
+        except (ValueError, TypeError):
+            pass
+
+        return cls._default_timeout
+
+    @classmethod
+    def get_default_max_retries(cls):
+        """Default value for requests max retries.
+
+        First looks for environment variable SERVER_RETRIES_ENV_KEY, which
+        can affect max retries value. If not available then use class
+        attribute '_default_max_retries'.
+
+        Returns:
+            int: Max retries value.
+        """
+
+        try:
+            return int(os.environ.get(SERVER_RETRIES_ENV_KEY))
+        except (ValueError, TypeError):
+            pass
+
+        return cls._default_max_retries
+
+    def get_timeout(self):
+        """Current value for requests timeout.
+
+        Returns:
+            float: Timeout value in seconds.
+        """
+
+        return self._timeout
+
+    def set_timeout(self, timeout):
+        """Change timeout value for requests.
+
+        Args:
+            timeout (Union[float, None]): Timeout value in seconds.
+        """
+
+        if timeout is None:
+            timeout = self.get_default_timeout()
+        self._timeout = float(timeout)
+
+    def get_max_retries(self):
+        """Current value for requests max retries.
+
+        Returns:
+            int: Max retries value.
+        """
+
+        return self._max_retries
+
+    def set_max_retries(self, max_retries):
+        """Change max retries value for requests.
+
+        Args:
+            max_retries (Union[int, None]): Max retries value.
+        """
+
+        if max_retries is None:
+            max_retries = self.get_default_max_retries()
+        self._max_retries = int(max_retries)
+
+    timeout = property(get_timeout, set_timeout)
+    max_retries = property(get_max_retries, set_max_retries)
+
     @property
     def access_token(self):
         """Access token used for authorization to server.
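Note: a hedged usage sketch of the new knobs, based only on the methods and environment variable names shown above; the server URL is a placeholder.

```python
import os

# Defaults can come from the environment...
os.environ["AYON_SERVER_TIMEOUT"] = "30"
os.environ["AYON_SERVER_RETRIES"] = "5"

# ...or be set explicitly per connection / at runtime:
api = ServerAPI("https://ayon.example.com", timeout=15.0, max_retries=2)
api.timeout = 60.0          # property delegates to set_timeout()
api.set_max_retries(None)   # None falls back to env var / class default
```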
@@ -890,9 +993,17 @@ class ServerAPI(object):
         for attr, filter_value in filters.items():
             query.set_variable_value(attr, filter_value)

+        # Backwards compatibility for server 0.3.x
+        # - will be removed in future releases
+        major, minor, _, _, _ = self.server_version_tuple
+        access_groups_field = "accessGroups"
+        if major == 0 and minor <= 3:
+            access_groups_field = "roles"
+
         for parsed_data in query.continuous_query(self):
             for user in parsed_data["users"]:
-                user["roles"] = json.loads(user["roles"])
+                user[access_groups_field] = json.loads(
+                    user[access_groups_field])
                 yield user

     def get_user(self, username=None):
@@ -1004,6 +1115,10 @@ class ServerAPI(object):
         logout_from_server(self._base_url, self._access_token)

     def _do_rest_request(self, function, url, **kwargs):
+        kwargs.setdefault("timeout", self.timeout)
+        max_retries = kwargs.get("max_retries", self.max_retries)
+        if max_retries < 1:
+            max_retries = 1
         if self._session is None:
             # Validate token if was not yet validated
             # - ignore validation if we're in middle of
@ -1023,38 +1138,54 @@ class ServerAPI(object):
|
|||
elif isinstance(function, RequestType):
|
||||
function = self._session_functions_mapping[function]
|
||||
|
||||
try:
|
||||
response = function(url, **kwargs)
|
||||
response = None
|
||||
new_response = None
|
||||
for _ in range(max_retries):
|
||||
try:
|
||||
response = function(url, **kwargs)
|
||||
break
|
||||
|
||||
except ConnectionRefusedError:
|
||||
# Server may be restarting
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Unable to connect the server. Connection refused"}
|
||||
)
|
||||
except requests.exceptions.Timeout:
|
||||
# Connection timed out
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Connection timed out."}
|
||||
)
|
||||
except requests.exceptions.ConnectionError:
|
||||
# Other connection error (ssl, etc) - does not make sense to
|
||||
# try call server again
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Unable to connect the server. Connection error"}
|
||||
)
|
||||
break
|
||||
|
||||
time.sleep(0.1)
|
||||
|
||||
if new_response is not None:
|
||||
return new_response
|
||||
|
||||
content_type = response.headers.get("Content-Type")
|
||||
if content_type == "application/json":
|
||||
try:
|
||||
new_response = RestApiResponse(response)
|
||||
except JSONDecodeError:
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{
|
||||
"detail": "The response is not a JSON: {}".format(
|
||||
response.text)
|
||||
}
|
||||
)
|
||||
|
||||
except ConnectionRefusedError:
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Unable to connect the server. Connection refused"}
|
||||
)
|
||||
except requests.exceptions.ConnectionError:
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Unable to connect the server. Connection error"}
|
||||
)
|
||||
else:
|
||||
content_type = response.headers.get("Content-Type")
|
||||
if content_type == "application/json":
|
||||
try:
|
||||
new_response = RestApiResponse(response)
|
||||
except JSONDecodeError:
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{
|
||||
"detail": "The response is not a JSON: {}".format(
|
||||
response.text)
|
||||
}
|
||||
)
|
||||
|
||||
elif content_type in ("image/jpeg", "image/png"):
|
||||
new_response = RestApiResponse(response)
|
||||
|
||||
else:
|
||||
new_response = RestApiResponse(response)
|
||||
new_response = RestApiResponse(response)
|
||||
|
||||
self.log.debug("Response {}".format(str(new_response)))
|
||||
return new_response
|
||||
|
|
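This hunk replaces the single `try` around the session call with a bounded retry loop: refused connections and timeouts get another attempt after a short sleep, other connection errors abort immediately, and a synthetic `RestApiResponse` carries the failure detail back to the caller. A standalone sketch of the same retry-then-fallback pattern (function name and messages are illustrative, not the library API):

```python
import time

import requests


def request_with_retries(function, url, max_retries=3, **kwargs):
    """Minimal sketch of retry-then-fallback; not the library code."""
    last_detail = None
    for _ in range(max(1, max_retries)):
        try:
            return function(url, **kwargs)
        except (ConnectionRefusedError, requests.exceptions.Timeout):
            # Server may be restarting or slow - worth another attempt
            last_detail = "Server did not respond, retrying"
        except requests.exceptions.ConnectionError:
            # SSL/DNS issues will not fix themselves - stop retrying
            last_detail = "Connection error"
            break
        time.sleep(0.1)
    raise RuntimeError(last_detail)
```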
@@ -1747,7 +1878,15 @@ class ServerAPI(object):
            entity_type_defaults = DEFAULT_WORKFILE_INFO_FIELDS

        elif entity_type == "user":
            entity_type_defaults = DEFAULT_USER_FIELDS
            entity_type_defaults = set(DEFAULT_USER_FIELDS)
            # Backwards compatibility for server 0.3.x
            #   - will be removed in future releases
            major, minor, _, _, _ = self.server_version_tuple
            if major == 0 and minor <= 3:
                entity_type_defaults.discard("accessGroups")
                entity_type_defaults.discard("defaultAccessGroups")
                entity_type_defaults.add("roles")
                entity_type_defaults.add("defaultRoles")

        else:
            raise ValueError("Unknown entity type \"{}\"".format(entity_type))
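The same version gate appears in both user-related hunks: servers 0.3.x and older still call access groups "roles". A minimal sketch of that gating, assuming the `(major, minor, ...)` shape of `server_version_tuple` shown above (the helper name is hypothetical):

```python
def pick_access_field(server_version_tuple):
    """Return the user access field name for the connected server.

    Servers 0.3.x and older still expose 'roles'; newer servers
    renamed the field to 'accessGroups'.
    """
    major, minor = server_version_tuple[0], server_version_tuple[1]
    if major == 0 and minor <= 3:
        return "roles"
    return "accessGroups"


assert pick_access_field((0, 3, 2, None, None)) == "roles"
assert pick_access_field((0, 4, 0, None, None)) == "accessGroups"
```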
@@ -2124,7 +2263,12 @@ class ServerAPI(object):
        server.
        """

        result = self.get("desktop/dependency_packages")
        endpoint = "desktop/dependencyPackages"
        major, minor, _, _, _ = self.server_version_tuple
        if major == 0 and minor <= 3:
            endpoint = "desktop/dependency_packages"

        result = self.get(endpoint)
        result.raise_for_status()
        return result.data

@@ -3810,6 +3954,8 @@ class ServerAPI(object):
        product_ids=None,
        product_names=None,
        folder_ids=None,
        product_types=None,
        statuses=None,
        names_by_folder_ids=None,
        active=True,
        fields=None,

@@ -3828,6 +3974,10 @@ class ServerAPI(object):
                filtering.
            folder_ids (Optional[Iterable[str]]): Ids of parent folders.
                Use 'None' if folder is direct child of project.
            product_types (Optional[Iterable[str]]): Product types used for
                filtering.
            statuses (Optional[Iterable[str]]): Product statuses used for
                filtering.
            names_by_folder_ids (Optional[dict[str, Iterable[str]]]): Product
                name filtering by folder id.
            active (Optional[bool]): Filter active/inactive products.

@@ -3862,6 +4012,18 @@ class ServerAPI(object):
        if not filter_folder_ids:
            return

        filter_product_types = None
        if product_types is not None:
            filter_product_types = set(product_types)
            if not filter_product_types:
                return

        filter_statuses = None
        if statuses is not None:
            filter_statuses = set(statuses)
            if not filter_statuses:
                return

        # This will disable 'folder_ids' and 'product_names' filters
        #   - maybe could be enhanced in future?
        if names_by_folder_ids is not None:

@@ -3881,7 +4043,7 @@ class ServerAPI(object):
            fields = set(fields) | {"id"}
            if "attrib" in fields:
                fields.remove("attrib")
                fields |= self.get_attributes_fields_for_type("folder")
                fields |= self.get_attributes_fields_for_type("product")
        else:
            fields = self.get_default_fields_for_type("product")

@@ -3908,6 +4070,12 @@ class ServerAPI(object):
        if filter_folder_ids:
            filters["folderIds"] = list(filter_folder_ids)

        if filter_product_types:
            filters["productTypes"] = list(filter_product_types)

        if filter_statuses:
            filters["statuses"] = list(filter_statuses)

        if product_ids:
            filters["productIds"] = list(product_ids)
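With the new arguments, product queries can be narrowed by type and status before any GraphQL round trip, and an explicitly empty filter set short-circuits to no results. A hedged usage sketch, reusing the assumed `api` instance from the earlier example (project name and filter values are illustrative):

```python
# Only approved renders under the given folders; yields product dicts
for product in api.get_products(
    "my_project",
    folder_ids={"<folder-id>"},
    product_types={"render"},
    statuses={"Approved"},
):
    print(product["id"], product["name"])
```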
@@ -1,2 +1,2 @@
"""Package declaring Python API for Ayon server."""
__version__ = "0.3.5"
__version__ = "0.4.1"

@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.16.5-nightly.1"
__version__ = "3.16.5-nightly.3"
@@ -5,7 +5,7 @@ from ayon_server.settings import BaseSettingsModel
class CreateRenderPlugin(BaseSettingsModel):
    mark_for_review: bool = Field(True, title="Review")
    defaults: list[str] = Field(
    default_variants: list[str] = Field(
        default_factory=list,
        title="Default Variants"
    )

@@ -40,7 +40,7 @@ DEFAULT_AFTEREFFECTS_SETTING = {
    "create": {
        "RenderCreator": {
            "mark_for_review": True,
            "defaults": [
            "default_variants": [
                "Main"
            ]
        }

@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring addon version."""
__version__ = "0.1.1"
__version__ = "0.1.2"

@@ -4,6 +4,7 @@ from ayon_server.settings import (
    BaseSettingsModel,
    MultiplatformPathListModel,
    ensure_unique_names,
    task_types_enum,
)
from ayon_server.exceptions import BadRequestException
@@ -38,13 +39,52 @@ class CoreImageIOConfigModel(BaseSettingsModel):
class CoreImageIOBaseModel(BaseSettingsModel):
    activate_global_color_management: bool = Field(
        False,
        title="Override global OCIO config"
        title="Enable Color Management"
    )
    ocio_config: CoreImageIOConfigModel = Field(
        default_factory=CoreImageIOConfigModel, title="OCIO config"
        default_factory=CoreImageIOConfigModel,
        title="OCIO config"
    )
    file_rules: CoreImageIOFileRulesModel = Field(
        default_factory=CoreImageIOFileRulesModel, title="File Rules"
        default_factory=CoreImageIOFileRulesModel,
        title="File Rules"
    )


class VersionStartCategoryProfileModel(BaseSettingsModel):
    _layout = "expanded"
    host_names: list[str] = Field(
        default_factory=list,
        title="Host names"
    )
    task_types: list[str] = Field(
        default_factory=list,
        title="Task types",
        enum_resolver=task_types_enum
    )
    task_names: list[str] = Field(
        default_factory=list,
        title="Task names"
    )
    product_types: list[str] = Field(
        default_factory=list,
        title="Product types"
    )
    product_names: list[str] = Field(
        default_factory=list,
        title="Product names"
    )
    version_start: int = Field(
        1,
        title="Version Start",
        ge=0
    )


class VersionStartCategoryModel(BaseSettingsModel):
    profiles: list[VersionStartCategoryProfileModel] = Field(
        default_factory=list,
        title="Profiles"
    )
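Each profile row combines host/task/product filters with a `version_start` value. The usual profile convention treats empty lists as wildcards; the resolver below is a sketch under that assumption, not the actual settings logic:

```python
def match_version_start(profiles, host_name, task_type, product_type):
    """Return 'version_start' of the first matching profile, else 1."""
    for profile in profiles:
        if profile["host_names"] and host_name not in profile["host_names"]:
            continue
        if profile["task_types"] and task_type not in profile["task_types"]:
            continue
        if (
            profile["product_types"]
            and product_type not in profile["product_types"]
        ):
            continue
        return profile["version_start"]
    return 1


profiles = [
    {
        "host_names": ["maya"],
        "task_types": [],
        "product_types": [],
        "version_start": 0,
    },
]
assert match_version_start(profiles, "maya", "modeling", "model") == 0
assert match_version_start(profiles, "nuke", "compositing", "render") == 1
```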
@@ -61,6 +101,10 @@ class CoreSettings(BaseSettingsModel):
        default_factory=GlobalToolsModel,
        title="Tools"
    )
    version_start_category: VersionStartCategoryModel = Field(
        default_factory=VersionStartCategoryModel,
        title="Version start"
    )
    imageio: CoreImageIOBaseModel = Field(
        default_factory=CoreImageIOBaseModel,
        title="Color Management (ImageIO)"

@@ -131,6 +175,9 @@ DEFAULT_VALUES = {
    "studio_code": "",
    "environments": "{}",
    "tools": DEFAULT_TOOLS_VALUES,
    "version_start_category": {
        "profiles": []
    },
    "publish": DEFAULT_PUBLISH_VALUES,
    "project_folder_structure": json.dumps({
        "__project_root__": {
@@ -1 +1 @@
__version__ = "0.1.1"
__version__ = "0.1.2"

@@ -44,6 +44,6 @@ class RenderSettingsModel(BaseSettingsModel):
DEFAULT_RENDER_SETTINGS = {
    "default_render_image_folder": "renders/3dsmax",
    "aov_separator": "underscore",
    "image_format": "png",
    "image_format": "exr",
    "multipass": True
}

@@ -252,7 +252,9 @@ DEFAULT_CREATORS_SETTINGS = {
    },
    "CreateUnrealSkeletalMesh": {
        "enabled": True,
        "default_variants": [],
        "default_variants": [
            "Main",
        ],
        "joint_hints": "jnt_org"
    },
    "CreateMultiverseLook": {

@@ -288,5 +288,22 @@ DEFAULT_SIMPLE_CREATORS = [
        "allow_multiple_items": True,
        "allow_version_control": False,
        "extensions": []
    },
    {
        "product_type": "audio",
        "identifier": "",
        "label": "Audio",
        "icon": "fa5s.file-audio",
        "default_variants": [
            "Main"
        ],
        "description": "Audio product",
        "detailed_description": "Audio files for review or final delivery",
        "allow_sequences": False,
        "allow_multiple_items": False,
        "allow_version_control": False,
        "extensions": [
            ".wav"
        ]
    }
]
@@ -19,7 +19,7 @@ import logging
from pyblish.api import Instance as PyblishInstance

from tests.lib.testing_classes import BaseTest
from openpype.plugins.publish.validate_sequence_frames import (
from openpype.hosts.unreal.plugins.publish.validate_sequence_frames import (
    ValidateSequenceFrames
)

@@ -38,7 +38,13 @@ class TestValidateSequenceFrames(BaseTest):
            data = {
                "frameStart": 1001,
                "frameEnd": 1002,
                "representations": []
                "representations": [],
                "assetEntity": {
                    "data": {
                        "clipIn": 1001,
                        "clipOut": 1002,
                    }
                }
            }
        yield Instance

@@ -58,6 +64,7 @@ class TestValidateSequenceFrames(BaseTest):
        ]
        instance.data["representations"] = representations
        instance.data["frameEnd"] = 1001
        instance.data["assetEntity"]["data"]["clipOut"] = 1001

        plugin.process(instance)

@@ -84,49 +91,11 @@ class TestValidateSequenceFrames(BaseTest):

        plugin.process(instance)

    @pytest.mark.parametrize("files",
                             [["Main_beauty.1001.v001.exr",
                               "Main_beauty.1002.v001.exr"]])
    def test_validate_sequence_frames_wrong_name(self, instance,
                                                 plugin, files):
        # tests for names with number inside, caused clique failure before
        representations = [
            {
                "ext": "exr",
                "files": files,
            }
        ]
        instance.data["representations"] = representations

        with pytest.raises(AssertionError) as excinfo:
            plugin.process(instance)
        assert ("Must detect single collection" in
                str(excinfo.value))

    @pytest.mark.parametrize("files",
                             [["Main_beauty.v001.1001.ass.gz",
                               "Main_beauty.v001.1002.ass.gz"]])
    def test_validate_sequence_frames_possible_wrong_name(
            self, instance, plugin, files):
        # currently pattern fails on extensions with dots
        representations = [
            {
                "files": files,
            }
        ]
        instance.data["representations"] = representations

        with pytest.raises(AssertionError) as excinfo:
            plugin.process(instance)
        assert ("Must not have remainder" in
                str(excinfo.value))

    @pytest.mark.parametrize("files",
                             [["Main_beauty.v001.1001.ass.gz",
                               "Main_beauty.v001.1002.ass.gz"]])
    def test_validate_sequence_frames__correct_ext(
            self, instance, plugin, files):
        # currently pattern fails on extensions with dots
        representations = [
            {
                "ext": "ass.gz",

@@ -147,6 +116,7 @@ class TestValidateSequenceFrames(BaseTest):
        ]
        instance.data["representations"] = representations
        instance.data["frameEnd"] = 1003
        instance.data["assetEntity"]["data"]["clipOut"] = 1003

        plugin.process(instance)

@@ -160,6 +130,7 @@ class TestValidateSequenceFrames(BaseTest):
        ]
        instance.data["representations"] = representations
        instance.data["frameEnd"] = 1003
        instance.data["assetEntity"]["data"]["clipOut"] = 1003

        with pytest.raises(ValueError) as excinfo:
            plugin.process(instance)

@@ -175,6 +146,7 @@ class TestValidateSequenceFrames(BaseTest):
        ]
        instance.data["representations"] = representations
        instance.data["frameEnd"] = 1003
        instance.data["assetEntity"]["data"]["clipOut"] = 1003

        with pytest.raises(AssertionError) as excinfo:
            plugin.process(instance)

@@ -195,6 +167,7 @@ class TestValidateSequenceFrames(BaseTest):
        instance.data["slate"] = True
        instance.data["representations"] = representations
        instance.data["frameEnd"] = 1003
        instance.data["assetEntity"]["data"]["clipOut"] = 1003

        plugin.process(instance)

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""Test suite for delivery functions."""
from openpype.lib.delivery import collect_frames
from openpype.lib import collect_frames


def test_collect_frames_multi_sequence():

@@ -153,4 +153,3 @@ def test_collect_frames_single_file():

    print(ret)
    assert ret == expected, "Not matching"
83  tests/unit/openpype/lib/test_event_system.py  Normal file
@@ -0,0 +1,83 @@
from openpype.lib.events import EventSystem, QueuedEventSystem


def test_default_event_system():
    output = []
    expected_output = [3, 2, 1]
    event_system = EventSystem()

    def callback_1():
        event_system.emit("topic.2", {}, None)
        output.append(1)

    def callback_2():
        event_system.emit("topic.3", {}, None)
        output.append(2)

    def callback_3():
        output.append(3)

    event_system.add_callback("topic.1", callback_1)
    event_system.add_callback("topic.2", callback_2)
    event_system.add_callback("topic.3", callback_3)

    event_system.emit("topic.1", {}, None)

    assert output == expected_output, (
        "Callbacks were not called in correct order")


def test_base_event_system_queue():
    output = []
    expected_output = [1, 2, 3]
    event_system = QueuedEventSystem()

    def callback_1():
        event_system.emit("topic.2", {}, None)
        output.append(1)

    def callback_2():
        event_system.emit("topic.3", {}, None)
        output.append(2)

    def callback_3():
        output.append(3)

    event_system.add_callback("topic.1", callback_1)
    event_system.add_callback("topic.2", callback_2)
    event_system.add_callback("topic.3", callback_3)

    event_system.emit("topic.1", {}, None)

    assert output == expected_output, (
        "Callbacks were not called in correct order")


def test_manual_event_system_queue():
    output = []
    expected_output = [1, 2, 3]
    event_system = QueuedEventSystem(auto_execute=False)

    def callback_1():
        event_system.emit("topic.2", {}, None)
        output.append(1)

    def callback_2():
        event_system.emit("topic.3", {}, None)
        output.append(2)

    def callback_3():
        output.append(3)

    event_system.add_callback("topic.1", callback_1)
    event_system.add_callback("topic.2", callback_2)
    event_system.add_callback("topic.3", callback_3)

    event_system.emit("topic.1", {}, None)

    while True:
        if event_system.process_next_event() is None:
            break

    assert output == expected_output, (
        "Callbacks were not called in correct order")
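Taken together, the three tests pin down the ordering contract: `EventSystem` runs nested emits immediately (depth-first, hence `[3, 2, 1]`), while `QueuedEventSystem` defers them to a FIFO queue (`[1, 2, 3]`), which can also be drained manually via `process_next_event()`. A minimal sketch of the queued idea (not the library implementation):

```python
from collections import deque


class TinyQueuedEvents:
    """Sketch: callbacks run in emit order, never nested."""

    def __init__(self):
        self._callbacks = {}
        self._queue = deque()
        self._processing = False

    def add_callback(self, topic, callback):
        self._callbacks.setdefault(topic, []).append(callback)

    def emit(self, topic):
        self._queue.append(topic)
        if self._processing:
            # A callback emitted this event; it waits its turn in the queue
            return
        self._processing = True
        while self._queue:
            current = self._queue.popleft()
            for callback in self._callbacks.get(current, []):
                callback()
        self._processing = False
```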
@@ -12,16 +12,19 @@
    removes temporary databases (?)
"""
import pytest
from bson.objectid import ObjectId

from tests.lib.testing_classes import ModuleUnitTest
from bson.objectid import ObjectId

from openpype.modules.sync_server.utils import SiteAlreadyPresentError


class TestSiteOperation(ModuleUnitTest):

    REPRESENTATION_ID = "60e578d0c987036c6a7b741d"

    TEST_FILES = [("1eCwPljuJeOI8A3aisfOIBKKjcmIycTEt",
    TEST_FILES = [("1FHE70Hi7y05LLT_1O3Y6jGxwZGXKV9zX",
                   "test_site_operations.zip", '')]

    @pytest.fixture(scope="module")

@@ -71,7 +74,7 @@ class TestSiteOperation(ModuleUnitTest):
    @pytest.mark.usefixtures("setup_sync_server_module")
    def test_add_site_again(self, dbcon, setup_sync_server_module):
        """Depends on test_add_site, must throw exception."""
        with pytest.raises(ValueError):
        with pytest.raises(SiteAlreadyPresentError):
            setup_sync_server_module.add_site(self.TEST_PROJECT_NAME,
                                              self.REPRESENTATION_ID,
                                              site_name='test_site')
98  tools/docker_build.ps1  Normal file
@@ -0,0 +1,98 @@
$current_dir = Get-Location
$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
$repo_root = (Get-Item $script_dir).parent.FullName

$env:PSModulePath = $env:PSModulePath + ";$($repo_root)\tools\modules\powershell"

function Exit-WithCode($exitcode) {
    # Only exit this host process if it's a child of another PowerShell parent process...
    $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId
    $parentProcName = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$parentPID" | Select-Object -Property Name).Name
    if ('powershell.exe' -eq $parentProcName) { $host.SetShouldExit($exitcode) }

    exit $exitcode
}

function Restore-Cwd() {
    $tmp_current_dir = Get-Location
    if ("$tmp_current_dir" -ne "$current_dir") {
        Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray
        Set-Location -Path $current_dir
    }
}

function Get-Container {
    if (-not (Test-Path -PathType Leaf -Path "$($repo_root)\build\docker-image.id")) {
        Write-Color -Text "!!! ", "Docker command failed, cannot find image id." -Color Red, Yellow
        Restore-Cwd
        Exit-WithCode 1
    }
    $id = Get-Content "$($repo_root)\build\docker-image.id"
    Write-Color -Text ">>> ", "Creating container from image id ", "[", $id, "]" -Color Green, Gray, White, Cyan, White
    $cid = docker create $id bash
    if ($LASTEXITCODE -ne 0) {
        Write-Color -Text "!!! ", "Cannot create container." -Color Red, Yellow
        Restore-Cwd
        Exit-WithCode 1
    }
    return $cid
}

function Change-Cwd() {
    Set-Location -Path $repo_root
}

function New-DockerBuild {
    $version_file = Get-Content -Path "$($repo_root)\openpype\version.py"
    $result = [regex]::Matches($version_file, '__version__ = "(?<version>\d+\.\d+.\d+.*)"')
    $openpype_version = $result[0].Groups['version'].Value
    $startTime = [int][double]::Parse((Get-Date -UFormat %s))
    Write-Color -Text ">>> ", "Building OpenPype using Docker ..." -Color Green, Gray, White
    $variant = $args[0]
    if ($variant.Length -eq 0) {
        $dockerfile = "$($repo_root)\Dockerfile"
    } else {
        $dockerfile = "$($repo_root)\Dockerfile.$variant"
    }
    if (-not (Test-Path -PathType Leaf -Path $dockerfile)) {
        Write-Color -Text "!!! ", "Dockerfile for specified platform ", "[", $variant, "]", " doesn't exist." -Color Red, Yellow, Cyan, White, Cyan, Yellow
        Restore-Cwd
        Exit-WithCode 1
    }
    Write-Color -Text ">>> ", "Using Dockerfile for ", "[ ", $variant, " ]" -Color Green, Gray, White, Cyan, White

    $build_dir = "$($repo_root)\build"
    if (-not (Test-Path $build_dir)) {
        New-Item -ItemType Directory -Path $build_dir
    }
    Write-Color -Text "--- ", "Cleaning build directory ..." -Color Yellow, Gray
    try {
        Remove-Item -Recurse -Force "$($build_dir)\*"
    } catch {
        Write-Color -Text "!!! ", "Cannot clean build directory, possibly because process is using it." -Color Red, Gray
        Write-Color -Text $_.Exception.Message -Color Red
        Exit-WithCode 1
    }

    Write-Color -Text ">>> ", "Running Docker build ..." -Color Green, Gray, White
    docker build --pull --iidfile $repo_root/build/docker-image.id --build-arg BUILD_DATE=$(Get-Date -UFormat %Y-%m-%dT%H:%M:%SZ) --build-arg VERSION=$openpype_version -t pypeclub/openpype:$openpype_version -f $dockerfile .
    if ($LASTEXITCODE -ne 0) {
        Write-Color -Text "!!! ", "Docker command failed.", $LASTEXITCODE -Color Red, Yellow, Red
        Restore-Cwd
        Exit-WithCode 1
    }
    Write-Color -Text ">>> ", "Copying build from container ..." -Color Green, Gray, White
    $cid = Get-Container

    docker cp "$($cid):/opt/openpype/build/exe.linux-x86_64-3.9" "$($repo_root)/build"
    docker cp "$($cid):/opt/openpype/build/build.log" "$($repo_root)/build"

    $endTime = [int][double]::Parse((Get-Date -UFormat %s))
    try {
        New-BurntToastNotification -AppLogo "$repo_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $($endTime - $startTime) secs. You will find OpenPype and build log in build directory."
    } catch {}
    Write-Color -Text "*** ", "All done in ", $($endTime - $startTime), " secs. You will find OpenPype and build log in ", "'.\build'", " directory." -Color Green, Gray, White, Gray, White, Gray
}

Change-Cwd
New-DockerBuild $ARGS