Merge branch 'develop' into enhancement/5516_Houdini-Reset-FPS

This commit is contained in:
Kayla Man 2023-08-31 16:20:23 +08:00 committed by GitHub
commit d019496d99
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
34 changed files with 810 additions and 197 deletions

View file

@ -35,6 +35,7 @@ body:
label: Version
description: What version are you running? Look to OpenPype Tray
options:
- 3.16.5-nightly.3
- 3.16.5-nightly.2
- 3.16.5-nightly.1
- 3.16.4
@ -134,7 +135,6 @@ body:
- 3.14.8
- 3.14.8-nightly.4
- 3.14.8-nightly.3
- 3.14.8-nightly.2
validations:
required: true
- type: dropdown

View file

@ -62,7 +62,7 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v
#### Clone repository:
```sh
git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git
git clone --recurse-submodules git@github.com:ynput/OpenPype.git
```
#### To build OpenPype:
@ -144,6 +144,10 @@ sudo ./tools/docker_build.sh centos7
If all is successful, you'll find built OpenPype in `./build/` folder.
The Docker build can also be started from a Windows machine; just use `./tools/docker_build.ps1` instead of the shell script.
This can even be used to create Linux builds (with the argument `centos7` or `debian`).
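For example, a hypothetical invocation from a PowerShell prompt in the repository root:
```sh
# CentOS 7 compatible Linux build via Docker
./tools/docker_build.ps1 centos7
# Or target Debian instead
./tools/docker_build.ps1 debian
```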
#### Manual build
You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that don't have it preinstalled.

View file

@ -83,10 +83,10 @@ def _get_subsets(
project_name,
subset_ids,
subset_names,
folder_ids,
names_by_folder_ids,
active,
fields
folder_ids=folder_ids,
names_by_folder_ids=names_by_folder_ids,
active=active,
fields=fields,
):
yield convert_v4_subset_to_v3(subset)

View file

@ -33,7 +33,7 @@ class CreateVDBCache(plugin.HoudiniCreator):
}
if self.selected_nodes:
parms["soppath"] = self.selected_nodes[0].path()
parms["soppath"] = self.get_sop_node_path(self.selected_nodes[0])
instance_node.setParms(parms)
@ -42,3 +42,63 @@ class CreateVDBCache(plugin.HoudiniCreator):
hou.ropNodeTypeCategory(),
hou.sopNodeTypeCategory()
]
def get_sop_node_path(self, selected_node):
"""Get Sop Path of the selected node.
Although Houdini allows ObjNode path on `sop_path` for the
ROP node, we prefer it set to the SopNode path explicitly.
"""
# Allow sop level paths (e.g. /obj/geo1/box1)
if isinstance(selected_node, hou.SopNode):
self.log.debug(
"Valid SopNode selection, 'SOP Path' in ROP will"
" be set to '%s'.", selected_node.path()
)
return selected_node.path()
# Allow object level paths to Geometry nodes (e.g. /obj/geo1)
# but do not allow other object level nodes types like cameras, etc.
elif isinstance(selected_node, hou.ObjNode) and \
selected_node.type().name() == "geo":
# Try to find output node.
sop_node = self.get_obj_output(selected_node)
if sop_node:
self.log.debug(
"Valid ObjNode selection, 'SOP Path' in ROP will "
"be set to the child path '%s'.", sop_node.path()
)
return sop_node.path()
self.log.debug(
"Selection isn't valid. 'SOP Path' in ROP will be empty."
)
return ""
def get_obj_output(self, obj_node):
"""Try to find output node.
If any output nodes are present, return the output node with
the minimum 'outputidx'.
If no output nodes are present, return the node with the display flag.
If no nodes are present at all, return None.
"""
outputs = obj_node.subnetOutputs()
# if obj_node is empty
if not outputs:
return
# if obj_node has one output child, whether it's a
# sop output node or a node with the render flag
elif len(outputs) == 1:
return outputs[0]
# if there are more than one, then it has multiple output nodes
# return the one with the minimum 'outputidx'
else:
return min(outputs,
key=lambda node: node.evalParm('outputidx'))
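As a rough illustration of the selection rules above (hypothetical node paths; runnable only inside a Houdini session):
```python
import hou

# A SOP node is used directly:
box = hou.node("/obj/geo1/box1")   # hou.SopNode
# -> get_sop_node_path returns "/obj/geo1/box1"

# A 'geo' object node resolves to its output child with the
# lowest 'outputidx', mirroring get_obj_output():
geo = hou.node("/obj/geo1")        # hou.ObjNode of type "geo"
outputs = geo.subnetOutputs()
if outputs:
    sop = min(outputs, key=lambda node: node.evalParm("outputidx"))
    print(sop.path())              # e.g. "/obj/geo1/output0"
```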

View file

@ -2,7 +2,19 @@
<mainMenu>
<menuBar>
<subMenu id="openpype_menu">
<label>OpenPype</label>
<labelExpression><![CDATA[
import os
return os.environ.get("AVALON_LABEL") or "OpenPype"
]]></labelExpression>
<actionItem id="asset_name">
<labelExpression><![CDATA[
from openpype.pipeline import get_current_asset_name, get_current_task_name
label = "{}, {}".format(get_current_asset_name(), get_current_task_name())
return label
]]></labelExpression>
</actionItem>
<separatorItem/>
<scriptItem id="openpype_create">
<label>Create...</label>

View file

@ -543,6 +543,9 @@ def list_instances(creator_id=None):
For SubsetManager
Args:
creator_id (Optional[str]): creator identifier
Returns:
(list) of dictionaries matching instances format
"""
@ -575,10 +578,13 @@ def list_instances(creator_id=None):
if creator_id and instance_data["creator_identifier"] != creator_id:
continue
if instance_data["instance_id"] in instance_ids:
instance_id = instance_data.get("instance_id")
if not instance_id:
pass
elif instance_id in instance_ids:
instance_data.pop("instance_id")
else:
instance_ids.add(instance_data["instance_id"])
instance_ids.add(instance_id)
# node name could change, so update subset name data
_update_subset_name_data(instance_data, node)
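A toy illustration of the deduplication logic above (hypothetical data; in the real plugin the ids live on scene nodes):
```python
instance_ids = set()
nodes_data = [
    {"instance_id": "abc", "subset": "modelMain"},
    {"instance_id": "abc", "subset": "modelMain"},  # duplicated node
    {"subset": "modelProxy"},                       # id missing entirely
]
for instance_data in nodes_data:
    instance_id = instance_data.get("instance_id")
    if not instance_id:
        pass  # missing id is left untouched here
    elif instance_id in instance_ids:
        # duplicate: drop the id so the two nodes do not clash
        instance_data.pop("instance_id")
    else:
        instance_ids.add(instance_id)

assert "instance_id" not in nodes_data[1]
```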

View file

@ -1,3 +1,5 @@
from collections import defaultdict
import pyblish.api
from openpype.pipeline.publish import get_errored_instances_from_context
from openpype.hosts.nuke.api.lib import (
@ -87,6 +89,11 @@ class ValidateNukeWriteNode(
correct_data
))
# Collect values for knobs of the same name in a list.
values_by_name = defaultdict(list)
for knob_data in correct_data["knobs"]:
values_by_name[knob_data["name"]].append(knob_data["value"])
for knob_data in correct_data["knobs"]:
knob_type = knob_data["type"]
self.log.debug("__ knob_type: {}".format(
@ -105,28 +112,33 @@ class ValidateNukeWriteNode(
)
key = knob_data["name"]
value = knob_data["value"]
values = values_by_name[key]
node_value = write_node[key].value()
# fix type differences
if type(node_value) in (int, float):
    try:
        if isinstance(value, list):
            value = color_gui_to_int(value)
        else:
            value = float(value)
        node_value = float(node_value)
    except ValueError:
        value = str(value)
else:
    value = str(value)
    node_value = str(node_value)
self.log.debug("__ key: {} | value: {}".format(
    key, value
))
fixed_values = []
for value in values:
    if type(node_value) in (int, float):
        try:
            if isinstance(value, list):
                value = color_gui_to_int(value)
            else:
                value = float(value)
            node_value = float(node_value)
        except ValueError:
            value = str(value)
    else:
        value = str(value)
        node_value = str(node_value)
    fixed_values.append(value)
self.log.debug("__ key: {} | values: {}".format(
    key, fixed_values
))
if (
node_value != value
node_value not in fixed_values
and key != "file"
and key != "tile_color"
):
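The effect of the grouping can be sketched with a hypothetical settings payload, where two entries share one knob name and either value should now pass:
```python
from collections import defaultdict

correct_data = {"knobs": [
    {"name": "colorspace", "type": "text", "value": "sRGB"},
    {"name": "colorspace", "type": "text", "value": "rec709"},
]}
values_by_name = defaultdict(list)
for knob_data in correct_data["knobs"]:
    values_by_name[knob_data["name"]].append(knob_data["value"])

assert values_by_name["colorspace"] == ["sRGB", "rec709"]
# ...so 'node_value not in fixed_values' only fails when the node
# matches neither accepted value.
```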

View file

@ -1,4 +1,6 @@
import clique
import os
import re
import pyblish.api
@ -21,7 +23,19 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
representations = instance.data.get("representations")
for repr in representations:
data = instance.data.get("assetEntity", {}).get("data", {})
patterns = [clique.PATTERNS["frames"]]
repr_files = repr["files"]
if isinstance(repr_files, str):
continue
ext = repr.get("ext")
if not ext:
_, ext = os.path.splitext(repr_files[0])
elif not ext.startswith("."):
ext = ".{}".format(ext)
pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(
re.escape(ext))
patterns = [pattern]
collections, remainder = clique.assemble(
repr["files"], minimum_items=1, patterns=patterns)
@ -30,6 +44,10 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
collection = collections[0]
frames = list(collection.indexes)
if instance.data.get("slate"):
# Slate is not part of the frame range
frames = frames[1:]
current_range = (frames[0], frames[-1])
required_range = (data["clipIn"],
data["clipOut"])

View file

@ -3,6 +3,7 @@ import os
import re
import copy
import inspect
import collections
import logging
import weakref
from uuid import uuid4
@ -340,8 +341,8 @@ class EventSystem(object):
event.emit()
return event
def emit_event(self, event):
"""Emit event object.
def _process_event(self, event):
"""Process event topic and trigger callbacks.
Args:
event (Event): Prepared event with topic and data.
@ -356,6 +357,91 @@ class EventSystem(object):
for callback in invalid_callbacks:
self._registered_callbacks.remove(callback)
def emit_event(self, event):
"""Emit event object.
Args:
event (Event): Prepared event with topic and data.
"""
self._process_event(event)
class QueuedEventSystem(EventSystem):
"""Events are automatically processed in queue.
If callback triggers another event, the event is not processed until
all callbacks of previous event are processed.
Allows to implement custom event process loop by changing 'auto_execute'.
Note:
This probably should be default behavior of 'EventSystem'. Changing it
now could cause problems in existing code.
Args:
auto_execute (Optional[bool]): If 'True', events are processed
automatically. Custom loop calling 'process_next_event'
must be implemented when set to 'False'.
"""
def __init__(self, auto_execute=True):
super(QueuedEventSystem, self).__init__()
self._event_queue = collections.deque()
self._current_event = None
self._auto_execute = auto_execute
def __len__(self):
return self.count()
def count(self):
"""Get number of events in queue.
Returns:
int: Number of events in queue.
"""
return len(self._event_queue)
def process_next_event(self):
"""Process next event in queue.
Should be used only if 'auto_execute' is set to 'False'. Only a single
event is processed.
Returns:
Union[Event, None]: Processed event.
"""
if self._current_event is not None:
raise ValueError("An event is already in progress.")
if not self._event_queue:
return None
event = self._event_queue.popleft()
self._current_event = event
self._process_event(event)
self._current_event = None
return event
def emit_event(self, event):
"""Emit event object.
Args:
event (Event): Prepared event with topic and data.
"""
if not self._auto_execute or self._current_event is not None:
self._event_queue.append(event)
return
self._event_queue.append(event)
while self._event_queue:
event = self._event_queue.popleft()
self._current_event = event
self._process_event(event)
self._current_event = None
class GlobalEventSystem:
"""Event system living in global scope of process.

View file

@ -373,10 +373,12 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
addons_info = _get_ayon_addons_information()
if not addons_info:
return v3_addons_to_skip
addons_dir = os.path.join(
appdirs.user_data_dir("AYON", "Ynput"),
"addons"
)
addons_dir = os.environ.get("AYON_ADDONS_DIR")
if not addons_dir:
addons_dir = os.path.join(
appdirs.user_data_dir("AYON", "Ynput"),
"addons"
)
if not os.path.exists(addons_dir):
log.warning("Addons directory does not exists. Path \"{}\"".format(
addons_dir

View file

@ -8,6 +8,7 @@ attribute or using default server if that attribute doesn't exist.
from maya import cmds
import pyblish.api
from openpype.pipeline.publish import KnownPublishError
class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
@ -81,13 +82,14 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
if k in default_servers
}
msg = (
    "\"{}\" server on instance is not enabled in project settings."
    " Enabled project servers:\n{}".format(
        instance_server, project_enabled_servers
    )
)
assert instance_server in project_enabled_servers, msg
if instance_server not in project_enabled_servers:
    msg = (
        "\"{}\" server on instance is not enabled in project settings."
        " Enabled project servers:\n{}".format(
            instance_server, project_enabled_servers
        )
    )
    raise KnownPublishError(msg)
self.log.debug("Using project approved server.")
return project_enabled_servers[instance_server]

View file

@ -1,31 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Scene setting</title>
<title>Deadline Pools</title>
<description>
## Invalid Deadline pools found
Configured pools don't match what is set in Deadline.
Configured pools don't match available pools in Deadline.
{invalid_value_str}
### How to repair?
If your instance had deadline pools set on creation, remove or
change them.
In other cases inform admin to change them in Settings.
Available deadline pools:
{pools_str}
Available deadline pools {pools_str}.
</description>
<detail>
### __Detailed Info__
This error is shown when deadline pool is not on Deadline anymore. It
could happen in case of republish old workfile which was created with
previous deadline pools,
or someone changed pools on Deadline side, but didn't modify Openpype
Settings.
This error is shown when a configured pool is not available on Deadline. It
can happen when publishing old workfiles which were created with previous
deadline pools, or someone changed the available pools in Deadline,
but didn't modify OpenPype Settings to match the changes.
</detail>
</error>
</root>

View file

@ -12,7 +12,9 @@ from openpype.pipeline import (
legacy_io,
OpenPypePyblishPluginMixin
)
from openpype.settings import get_project_settings
from openpype.pipeline.publish.lib import (
replace_with_published_scene_path
)
from openpype.hosts.max.api.lib import (
get_current_renderer,
get_multipass_setting
@ -247,7 +249,8 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
if instance.data["renderer"] == "Redshift_Renderer":
self.log.debug("Using Redshift...published scene wont be used..")
replace_in_path = False
return replace_in_path
return replace_with_published_scene_path(
instance, replace_in_path)
@staticmethod
def _iter_expected_files(exp):

View file

@ -1,8 +1,7 @@
import os
import requests
import pyblish.api
from openpype_modules.deadline.abstract_submit_deadline import requests_get
class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
"""Validate Deadline Web Service is running"""
@ -12,34 +11,25 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
hosts = ["maya", "nuke"]
families = ["renderlayer", "render"]
# cache
responses = {}
def process(self, instance):
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
# if custom one is set in instance, use that
if instance.data.get("deadlineUrl"):
deadline_url = instance.data.get("deadlineUrl")
self.log.info(
"We have deadline URL on instance {}".format(
deadline_url))
self.log.debug(
"We have deadline URL on instance {}".format(deadline_url)
)
assert deadline_url, "Requires Deadline Webservice URL"
# Check response
response = self._requests_get(deadline_url)
if deadline_url not in self.responses:
self.responses[deadline_url] = requests_get(deadline_url)
response = self.responses[deadline_url]
assert response.ok, "Response must be ok"
assert response.text.startswith("Deadline Web Service "), (
"Web service did not respond with 'Deadline Web Service'"
)
def _requests_get(self, *args, **kwargs):
""" Wrapper for requests, disabling SSL certificate validation if
DONT_VERIFY_SSL environment variable is found. This is useful when
Deadline or Muster server are running with self-signed certificates
and their certificate is not added to trusted certificates on
client machines.
WARNING: disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa
return requests.get(*args, **kwargs)

View file

@ -25,33 +25,58 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
"maxrender"]
optional = True
# cache
pools_per_url = {}
def process(self, instance):
if not self.is_active(instance.data):
return
if not instance.data.get("farm"):
self.log.debug("Skipping local instance.")
return
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
self.log.info("deadline_url::{}".format(deadline_url))
pools = DeadlineModule.get_deadline_pools(deadline_url, log=self.log)
self.log.info("pools::{}".format(pools))
formatting_data = {
"pools_str": ",".join(pools)
}
deadline_url = self.get_deadline_url(instance)
pools = self.get_pools(deadline_url)
invalid_pools = {}
primary_pool = instance.data.get("primaryPool")
if primary_pool and primary_pool not in pools:
msg = "Configured primary '{}' not present on Deadline".format(
instance.data["primaryPool"])
formatting_data["invalid_value_str"] = msg
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)
invalid_pools["primary"] = primary_pool
secondary_pool = instance.data.get("secondaryPool")
if secondary_pool and secondary_pool not in pools:
msg = "Configured secondary '{}' not present on Deadline".format(
instance.data["secondaryPool"])
formatting_data["invalid_value_str"] = msg
raise PublishXmlValidationError(self, msg,
formatting_data=formatting_data)
invalid_pools["secondary"] = secondary_pool
if invalid_pools:
message = "\n".join(
"{} pool '{}' not available on Deadline".format(key.title(),
pool)
for key, pool in invalid_pools.items()
)
raise PublishXmlValidationError(
plugin=self,
message=message,
formatting_data={"pools_str": ", ".join(pools)}
)
def get_deadline_url(self, instance):
# get default deadline webservice url from deadline module
deadline_url = instance.context.data["defaultDeadline"]
if instance.data.get("deadlineUrl"):
# if custom one is set in instance, use that
deadline_url = instance.data.get("deadlineUrl")
return deadline_url
def get_pools(self, deadline_url):
if deadline_url not in self.pools_per_url:
self.log.debug(
"Querying available pools for Deadline url: {}".format(
deadline_url)
)
pools = DeadlineModule.get_deadline_pools(deadline_url,
log=self.log)
self.log.info("Available pools: {}".format(pools))
self.pools_per_url[deadline_url] = pools
return self.pools_per_url[deadline_url]

View file

@ -91,7 +91,13 @@ class AyonDeadlinePlugin(DeadlinePlugin):
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
exe = FileUtils.SearchFileList(exe_list)
expanded_paths = []
for path in exe_list.split(";"):
if path.startswith("~"):
path = os.path.expanduser(path)
expanded_paths.append(path)
exe = FileUtils.SearchFileList(";".join(expanded_paths))
if exe == "":
self.FailRender(

View file

@ -547,7 +547,14 @@ def get_ayon_executable():
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
return exe_list
# Expand user paths
expanded_paths = []
for path in exe_list.split(";"):
if path.startswith("~"):
path = os.path.expanduser(path)
expanded_paths.append(path)
return ";".join(expanded_paths)
def inject_render_job_id(deadlinePlugin):
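A minimal sketch of the '~' expansion above, runnable anywhere (the semicolon-separated exe list mirrors what Deadline hands over):
```python
import os

exe_list = "~/AYON/ayon;/usr/local/bin/ayon"
expanded = ";".join(
    os.path.expanduser(path) if path.startswith("~") else path
    for path in exe_list.split(";")
)
# '~' entries now point into the user's home directory before the
# list reaches Deadline's FileUtils.SearchFileList.
print(expanded)
```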

View file

@ -464,9 +464,8 @@ def apply_plugin_settings_automatically(plugin, settings, logger=None):
for option, value in settings.items():
if logger:
logger.debug("Plugin {} - Attr: {} -> {}".format(
option, value, plugin.__name__
))
logger.debug("Plugin %s - Attr: %s -> %s",
plugin.__name__, option, value)
setattr(plugin, option, value)

View file

@ -7,12 +7,12 @@ from openpype.pipeline.publish import (
class ValidatePublishDir(pyblish.api.InstancePlugin):
"""Validates if 'publishDir' is a project directory
"""Validates if files are being published into a project directory
'publishDir' is collected based on publish templates. In specific cases
('source' template) source folder of items is used as a 'publishDir', this
validates if it is inside any project dir for the project.
(eg. files are not published from local folder, unaccessible for studio'
In specific cases ('source' template - in place publishing) the source folder
of published items is used as a regular `publish` dir.
This validates if it is inside any project dir for the project.
(e.g. files are not published from a local folder, inaccessible for the studio)
"""
@ -44,6 +44,8 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
anatomy = instance.context.data["anatomy"]
# original_dirname must be convertible to a rootless path,
# i.e. it must be a path inside one of the project's root folders
success, _ = anatomy.find_root_template_from_path(original_dirname)
formatting_data = {
@ -56,11 +58,12 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
formatting_data=formatting_data)
def _get_template_name_from_instance(self, instance):
"""Find template which will be used during integration."""
project_name = instance.context.data["projectName"]
host_name = instance.context.data["hostName"]
anatomy_data = instance.data["anatomyData"]
family = anatomy_data["family"]
family = self.family_mapping.get("family") or family
family = self.family_mapping.get(family) or family
task_info = anatomy_data.get("task") or {}
return get_publish_template_name(

View file

@ -25,16 +25,16 @@ class ValidateVersion(pyblish.api.InstancePlugin):
# TODO: Remove full non-html version upon drop of old publisher
msg = (
"Version '{0}' from instance '{1}' that you are "
" trying to publish is lower or equal to an existing version "
" in the database. Version in database: '{2}'."
"trying to publish is lower or equal to an existing version "
"in the database. Version in database: '{2}'."
"Please version up your workfile to a higher version number "
"than: '{2}'."
).format(version, instance.data["name"], latest_version)
msg_html = (
"Version <b>{0}</b> from instance <b>{1}</b> that you are "
" trying to publish is lower or equal to an existing version "
" in the database. Version in database: <b>{2}</b>.<br><br>"
"trying to publish is lower or equal to an existing version "
"in the database. Version in database: <b>{2}</b>.<br><br>"
"Please version up your workfile to a higher version number "
"than: <b>{2}</b>."
).format(version, instance.data["name"], latest_version)

View file

@ -256,6 +256,23 @@
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": []
},
{
"family": "audio",
"identifier": "",
"label": "Audio ",
"icon": "fa5s.file-audio",
"default_variants": [
"Main"
],
"description": "Audio product",
"detailed_description": "Audio files for review or final delivery",
"allow_sequences": false,
"allow_multiple_items": false,
"allow_version_control": false,
"extensions": [
".wav"
]
}
],
"editorial_creators": {

View file

@ -75,7 +75,7 @@ class TasksModel(QtGui.QStandardItemModel):
def set_asset_id(self, asset_id):
asset_doc = None
if self._context_is_valid():
if asset_id and self._context_is_valid():
project_name = self._get_current_project()
asset_doc = get_asset_by_id(
project_name, asset_id, fields=["data.tasks"]

View file

@ -48,6 +48,11 @@ from ._api import (
patch,
delete,
get_timeout,
set_timeout,
get_max_retries,
set_max_retries,
get_event,
get_events,
dispatch_event,
@ -245,6 +250,11 @@ __all__ = (
"patch",
"delete",
"get_timeout",
"set_timeout",
"get_max_retries",
"set_max_retries",
"get_event",
"get_events",
"dispatch_event",

View file

@ -474,6 +474,26 @@ def delete(*args, **kwargs):
return con.delete(*args, **kwargs)
def get_timeout(*args, **kwargs):
con = get_server_api_connection()
return con.get_timeout(*args, **kwargs)
def set_timeout(*args, **kwargs):
con = get_server_api_connection()
return con.set_timeout(*args, **kwargs)
def get_max_retries(*args, **kwargs):
con = get_server_api_connection()
return con.get_max_retries(*args, **kwargs)
def set_max_retries(*args, **kwargs):
con = get_server_api_connection()
return con.set_max_retries(*args, **kwargs)
def get_event(*args, **kwargs):
con = get_server_api_connection()
return con.get_event(*args, **kwargs)

View file

@ -1,18 +1,21 @@
# Environment variables where server url and api key are stored for global connection
SERVER_URL_ENV_KEY = "AYON_SERVER_URL"
SERVER_API_ENV_KEY = "AYON_API_KEY"
SERVER_TIMEOUT_ENV_KEY = "AYON_SERVER_TIMEOUT"
SERVER_RETRIES_ENV_KEY = "AYON_SERVER_RETRIES"
# Backwards compatibility
SERVER_TOKEN_ENV_KEY = SERVER_API_ENV_KEY
# --- User ---
DEFAULT_USER_FIELDS = {
"roles",
"accessGroups",
"defaultAccessGroups",
"name",
"isService",
"isManager",
"isGuest",
"isAdmin",
"defaultRoles",
"createdAt",
"active",
"hasPassword",

View file

@ -247,9 +247,11 @@ def products_graphql_query(fields):
query = GraphQlQuery("ProductsQuery")
project_name_var = query.add_variable("projectName", "String!")
folder_ids_var = query.add_variable("folderIds", "[String!]")
product_ids_var = query.add_variable("productIds", "[String!]")
product_names_var = query.add_variable("productNames", "[String!]")
folder_ids_var = query.add_variable("folderIds", "[String!]")
product_types_var = query.add_variable("productTypes", "[String!]")
statuses_var = query.add_variable("statuses", "[String!]")
project_field = query.add_field("project")
project_field.set_filter("name", project_name_var)
@ -258,6 +260,8 @@ def products_graphql_query(fields):
products_field.set_filter("ids", product_ids_var)
products_field.set_filter("names", product_names_var)
products_field.set_filter("folderIds", folder_ids_var)
products_field.set_filter("productTypes", product_types_var)
products_field.set_filter("statuses", statuses_var)
nested_fields = fields_to_dict(set(fields))
add_links_fields(products_field, nested_fields)

View file

@ -2,6 +2,7 @@ import os
import re
import io
import json
import time
import logging
import collections
import platform
@ -26,6 +27,8 @@ except ImportError:
from json import JSONDecodeError as RequestsJSONDecodeError
from .constants import (
SERVER_TIMEOUT_ENV_KEY,
SERVER_RETRIES_ENV_KEY,
DEFAULT_PRODUCT_TYPE_FIELDS,
DEFAULT_PROJECT_FIELDS,
DEFAULT_FOLDER_FIELDS,
@ -127,6 +130,8 @@ class RestApiResponse(object):
@property
def text(self):
if self._response is None:
return self.detail
return self._response.text
@property
@ -135,6 +140,8 @@ class RestApiResponse(object):
@property
def headers(self):
if self._response is None:
return {}
return self._response.headers
@property
@ -148,6 +155,8 @@ class RestApiResponse(object):
@property
def content(self):
if self._response is None:
return b""
return self._response.content
@property
@ -339,7 +348,11 @@ class ServerAPI(object):
variable value 'AYON_CERT_FILE' by default.
create_session (Optional[bool]): Create session for connection if
token is available. Default is True.
timeout (Optional[float]): Timeout for requests.
max_retries (Optional[int]): Number of retries for requests.
"""
_default_timeout = 10.0
_default_max_retries = 3
def __init__(
self,
@ -352,6 +365,8 @@ class ServerAPI(object):
ssl_verify=None,
cert=None,
create_session=True,
timeout=None,
max_retries=None,
):
if not base_url:
raise ValueError("Invalid server URL {}".format(str(base_url)))
@ -370,6 +385,13 @@ class ServerAPI(object):
)
self._sender = sender
self._timeout = None
self._max_retries = None
# Set timeout and max retries based on passed values
self.set_timeout(timeout)
self.set_max_retries(max_retries)
if ssl_verify is None:
# Custom AYON env variable for CA file or 'True'
# - that should cover most default behaviors in 'requests'
@ -474,6 +496,87 @@ class ServerAPI(object):
ssl_verify = property(get_ssl_verify, set_ssl_verify)
cert = property(get_cert, set_cert)
@classmethod
def get_default_timeout(cls):
"""Default value for requests timeout.
First looks for environment variable SERVER_TIMEOUT_ENV_KEY which
can affect timeout value. If not available then use class
attribute '_default_timeout'.
Returns:
float: Timeout value in seconds.
"""
try:
return float(os.environ.get(SERVER_TIMEOUT_ENV_KEY))
except (ValueError, TypeError):
pass
return cls._default_timeout
@classmethod
def get_default_max_retries(cls):
"""Default value for requests max retries.
First looks for environment variable SERVER_RETRIES_ENV_KEY, which
can affect max retries value. If not available then use class
attribute '_default_max_retries'.
Returns:
int: Max retries value.
"""
try:
return int(os.environ.get(SERVER_RETRIES_ENV_KEY))
except (ValueError, TypeError):
pass
return cls._default_max_retries
def get_timeout(self):
"""Current value for requests timeout.
Returns:
float: Timeout value in seconds.
"""
return self._timeout
def set_timeout(self, timeout):
"""Change timeout value for requests.
Args:
timeout (Union[float, None]): Timeout value in seconds.
"""
if timeout is None:
timeout = self.get_default_timeout()
self._timeout = float(timeout)
def get_max_retries(self):
"""Current value for requests max retries.
Returns:
int: Max retries value.
"""
return self._max_retries
def set_max_retries(self, max_retries):
"""Change max retries value for requests.
Args:
max_retries (Union[int, None]): Max retries value.
"""
if max_retries is None:
max_retries = self.get_default_max_retries()
self._max_retries = int(max_retries)
timeout = property(get_timeout, set_timeout)
max_retries = property(get_max_retries, set_max_retries)
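Hypothetical usage of the new knobs, assuming the package imports as `ayon_api` and a global connection is configured (`AYON_SERVER_URL`/`AYON_API_KEY` set):
```python
import os

# Environment defaults, read whenever a value is reset to None
# (per this diff: AYON_SERVER_TIMEOUT / AYON_SERVER_RETRIES).
os.environ["AYON_SERVER_TIMEOUT"] = "30"
os.environ["AYON_SERVER_RETRIES"] = "5"

import ayon_api  # assumption: the vendored package imports under this name

ayon_api.set_timeout(15.0)      # explicit value wins
ayon_api.set_max_retries(2)
ayon_api.set_timeout(None)      # None falls back to the env default
assert ayon_api.get_timeout() == 30.0
```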
@property
def access_token(self):
"""Access token used for authorization to server.
@ -890,9 +993,17 @@ class ServerAPI(object):
for attr, filter_value in filters.items():
query.set_variable_value(attr, filter_value)
# Backwards compatibility for server 0.3.x
# - will be removed in future releases
major, minor, _, _, _ = self.server_version_tuple
access_groups_field = "accessGroups"
if major == 0 and minor <= 3:
access_groups_field = "roles"
for parsed_data in query.continuous_query(self):
for user in parsed_data["users"]:
user["roles"] = json.loads(user["roles"])
user[access_groups_field] = json.loads(
user[access_groups_field])
yield user
def get_user(self, username=None):
@ -1004,6 +1115,10 @@ class ServerAPI(object):
logout_from_server(self._base_url, self._access_token)
def _do_rest_request(self, function, url, **kwargs):
kwargs.setdefault("timeout", self.timeout)
max_retries = kwargs.get("max_retries", self.max_retries)
if max_retries < 1:
max_retries = 1
if self._session is None:
# Validate token if was not yet validated
# - ignore validation if we're in middle of
@ -1023,38 +1138,54 @@ class ServerAPI(object):
elif isinstance(function, RequestType):
function = self._session_functions_mapping[function]
try:
    response = function(url, **kwargs)
except ConnectionRefusedError:
    new_response = RestApiResponse(
        None,
        {"detail": "Unable to connect the server. Connection refused"}
    )
except requests.exceptions.ConnectionError:
    new_response = RestApiResponse(
        None,
        {"detail": "Unable to connect the server. Connection error"}
    )
else:
    content_type = response.headers.get("Content-Type")
    if content_type == "application/json":
        try:
            new_response = RestApiResponse(response)
        except JSONDecodeError:
            new_response = RestApiResponse(
                None,
                {
                    "detail": "The response is not a JSON: {}".format(
                        response.text)
                }
            )
    elif content_type in ("image/jpeg", "image/png"):
        new_response = RestApiResponse(response)
    else:
        new_response = RestApiResponse(response)
response = None
new_response = None
for _ in range(max_retries):
    try:
        response = function(url, **kwargs)
        break
    except ConnectionRefusedError:
        # Server may be restarting
        new_response = RestApiResponse(
            None,
            {"detail": "Unable to connect the server. Connection refused"}
        )
    except requests.exceptions.Timeout:
        # Connection timed out
        new_response = RestApiResponse(
            None,
            {"detail": "Connection timed out."}
        )
    except requests.exceptions.ConnectionError:
        # Other connection error (ssl, etc.) - does not make sense to
        # try call server again
        new_response = RestApiResponse(
            None,
            {"detail": "Unable to connect the server. Connection error"}
        )
        break
    time.sleep(0.1)
if new_response is not None:
    return new_response
content_type = response.headers.get("Content-Type")
if content_type == "application/json":
    try:
        new_response = RestApiResponse(response)
    except JSONDecodeError:
        new_response = RestApiResponse(
            None,
            {
                "detail": "The response is not a JSON: {}".format(
                    response.text)
            }
        )
elif content_type in ("image/jpeg", "image/png"):
    new_response = RestApiResponse(response)
else:
    new_response = RestApiResponse(response)
self.log.debug("Response {}".format(str(new_response)))
return new_response
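The retry shape introduced here can be summarized in isolation (a sketch, not the class method itself):
```python
import time
import requests

def request_with_retries(url, max_retries=3, timeout=10.0):
    # Retry refused or timed-out attempts with a short pause; give up
    # immediately on other connection errors (SSL, DNS, ...).
    detail = None
    for _ in range(max(max_retries, 1)):
        try:
            return requests.get(url, timeout=timeout)
        except ConnectionRefusedError:
            detail = "Unable to connect the server. Connection refused"
        except requests.exceptions.Timeout:
            detail = "Connection timed out."
        except requests.exceptions.ConnectionError:
            detail = "Unable to connect the server. Connection error"
            break
        time.sleep(0.1)
    raise RuntimeError(detail)
```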
@ -1747,7 +1878,15 @@ class ServerAPI(object):
entity_type_defaults = DEFAULT_WORKFILE_INFO_FIELDS
elif entity_type == "user":
entity_type_defaults = DEFAULT_USER_FIELDS
entity_type_defaults = set(DEFAULT_USER_FIELDS)
# Backwards compatibility for server 0.3.x
# - will be removed in future releases
major, minor, _, _, _ = self.server_version_tuple
if major == 0 and minor <= 3:
entity_type_defaults.discard("accessGroups")
entity_type_defaults.discard("defaultAccessGroups")
entity_type_defaults.add("roles")
entity_type_defaults.add("defaultRoles")
else:
raise ValueError("Unknown entity type \"{}\"".format(entity_type))
@ -2124,7 +2263,12 @@ class ServerAPI(object):
server.
"""
result = self.get("desktop/dependency_packages")
endpoint = "desktop/dependencyPackages"
major, minor, _, _, _ = self.server_version_tuple
if major == 0 and minor <= 3:
endpoint = "desktop/dependency_packages"
result = self.get(endpoint)
result.raise_for_status()
return result.data
@ -3810,6 +3954,8 @@ class ServerAPI(object):
product_ids=None,
product_names=None,
folder_ids=None,
product_types=None,
statuses=None,
names_by_folder_ids=None,
active=True,
fields=None,
@ -3828,6 +3974,10 @@ class ServerAPI(object):
filtering.
folder_ids (Optional[Iterable[str]]): Ids of task parents.
Use 'None' if folder is direct child of project.
product_types (Optional[Iterable[str]]): Product types used for
filtering.
statuses (Optional[Iterable[str]]): Product statuses used for
filtering.
names_by_folder_ids (Optional[dict[str, Iterable[str]]]): Product
name filtering by folder id.
active (Optional[bool]): Filter active/inactive products.
@ -3862,6 +4012,18 @@ class ServerAPI(object):
if not filter_folder_ids:
return
filter_product_types = None
if product_types is not None:
filter_product_types = set(product_types)
if not filter_product_types:
return
filter_statuses = None
if statuses is not None:
filter_statuses = set(statuses)
if not filter_statuses:
return
# This will disable 'folder_ids' and 'product_names' filters
# - maybe could be enhanced in future?
if names_by_folder_ids is not None:
@ -3881,7 +4043,7 @@ class ServerAPI(object):
fields = set(fields) | {"id"}
if "attrib" in fields:
fields.remove("attrib")
fields |= self.get_attributes_fields_for_type("folder")
fields |= self.get_attributes_fields_for_type("product")
else:
fields = self.get_default_fields_for_type("product")
@ -3908,6 +4070,12 @@ class ServerAPI(object):
if filter_folder_ids:
filters["folderIds"] = list(filter_folder_ids)
if filter_product_types:
filters["productTypes"] = list(filter_product_types)
if filter_statuses:
filters["statuses"] = list(filter_statuses)
if product_ids:
filters["productIds"] = list(product_ids)

View file

@ -1,2 +1,2 @@
"""Package declaring Python API for Ayon server."""
__version__ = "0.3.5"
__version__ = "0.4.1"

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.16.5-nightly.2"
__version__ = "3.16.5-nightly.3"

View file

@ -19,7 +19,7 @@ import logging
from pyblish.api import Instance as PyblishInstance
from tests.lib.testing_classes import BaseTest
from openpype.plugins.publish.validate_sequence_frames import (
from openpype.hosts.unreal.plugins.publish.validate_sequence_frames import (
ValidateSequenceFrames
)
@ -38,7 +38,13 @@ class TestValidateSequenceFrames(BaseTest):
data = {
"frameStart": 1001,
"frameEnd": 1002,
"representations": []
"representations": [],
"assetEntity": {
"data": {
"clipIn": 1001,
"clipOut": 1002,
}
}
}
yield Instance
@ -58,6 +64,7 @@ class TestValidateSequenceFrames(BaseTest):
]
instance.data["representations"] = representations
instance.data["frameEnd"] = 1001
instance.data["assetEntity"]["data"]["clipOut"] = 1001
plugin.process(instance)
@ -84,49 +91,11 @@ class TestValidateSequenceFrames(BaseTest):
plugin.process(instance)
@pytest.mark.parametrize("files",
[["Main_beauty.1001.v001.exr",
"Main_beauty.1002.v001.exr"]])
def test_validate_sequence_frames_wrong_name(self, instance,
plugin, files):
# tests for names with number inside, caused clique failure before
representations = [
{
"ext": "exr",
"files": files,
}
]
instance.data["representations"] = representations
with pytest.raises(AssertionError) as excinfo:
plugin.process(instance)
assert ("Must detect single collection" in
str(excinfo.value))
@pytest.mark.parametrize("files",
[["Main_beauty.v001.1001.ass.gz",
"Main_beauty.v001.1002.ass.gz"]])
def test_validate_sequence_frames_possible_wrong_name(
self, instance, plugin, files):
# currently pattern fails on extensions with dots
representations = [
{
"files": files,
}
]
instance.data["representations"] = representations
with pytest.raises(AssertionError) as excinfo:
plugin.process(instance)
assert ("Must not have remainder" in
str(excinfo.value))
@pytest.mark.parametrize("files",
[["Main_beauty.v001.1001.ass.gz",
"Main_beauty.v001.1002.ass.gz"]])
def test_validate_sequence_frames__correct_ext(
self, instance, plugin, files):
# currently pattern fails on extensions with dots
representations = [
{
"ext": "ass.gz",
@ -147,6 +116,7 @@ class TestValidateSequenceFrames(BaseTest):
]
instance.data["representations"] = representations
instance.data["frameEnd"] = 1003
instance.data["assetEntity"]["data"]["clipOut"] = 1003
plugin.process(instance)
@ -160,6 +130,7 @@ class TestValidateSequenceFrames(BaseTest):
]
instance.data["representations"] = representations
instance.data["frameEnd"] = 1003
instance.data["assetEntity"]["data"]["clipOut"] = 1003
with pytest.raises(ValueError) as excinfo:
plugin.process(instance)
@ -175,6 +146,7 @@ class TestValidateSequenceFrames(BaseTest):
]
instance.data["representations"] = representations
instance.data["frameEnd"] = 1003
instance.data["assetEntity"]["data"]["clipOut"] = 1003
with pytest.raises(AssertionError) as excinfo:
plugin.process(instance)
@ -195,6 +167,7 @@ class TestValidateSequenceFrames(BaseTest):
instance.data["slate"] = True
instance.data["representations"] = representations
instance.data["frameEnd"] = 1003
instance.data["assetEntity"]["data"]["clipOut"] = 1003
plugin.process(instance)

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""Test suite for delivery functions."""
from openpype.lib.delivery import collect_frames
from openpype.lib import collect_frames
def test_collect_frames_multi_sequence():
@ -153,4 +153,3 @@ def test_collect_frames_single_file():
print(ret)
assert ret == expected, "Not matching"

View file

@ -0,0 +1,83 @@
from openpype.lib.events import EventSystem, QueuedEventSystem
def test_default_event_system():
output = []
expected_output = [3, 2, 1]
event_system = EventSystem()
def callback_1():
event_system.emit("topic.2", {}, None)
output.append(1)
def callback_2():
event_system.emit("topic.3", {}, None)
output.append(2)
def callback_3():
output.append(3)
event_system.add_callback("topic.1", callback_1)
event_system.add_callback("topic.2", callback_2)
event_system.add_callback("topic.3", callback_3)
event_system.emit("topic.1", {}, None)
assert output == expected_output, (
"Callbacks were not called in correct order")
def test_base_event_system_queue():
output = []
expected_output = [1, 2, 3]
event_system = QueuedEventSystem()
def callback_1():
event_system.emit("topic.2", {}, None)
output.append(1)
def callback_2():
event_system.emit("topic.3", {}, None)
output.append(2)
def callback_3():
output.append(3)
event_system.add_callback("topic.1", callback_1)
event_system.add_callback("topic.2", callback_2)
event_system.add_callback("topic.3", callback_3)
event_system.emit("topic.1", {}, None)
assert output == expected_output, (
"Callbacks were not called in correct order")
def test_manual_event_system_queue():
output = []
expected_output = [1, 2, 3]
event_system = QueuedEventSystem(auto_execute=False)
def callback_1():
event_system.emit("topic.2", {}, None)
output.append(1)
def callback_2():
event_system.emit("topic.3", {}, None)
output.append(2)
def callback_3():
output.append(3)
event_system.add_callback("topic.1", callback_1)
event_system.add_callback("topic.2", callback_2)
event_system.add_callback("topic.3", callback_3)
event_system.emit("topic.1", {}, None)
while True:
if event_system.process_next_event() is None:
break
assert output == expected_output, (
"Callbacks were not called in correct order")

View file

@ -12,16 +12,19 @@
removes temporary databases (?)
"""
import pytest
from bson.objectid import ObjectId
from tests.lib.testing_classes import ModuleUnitTest
from bson.objectid import ObjectId
from openpype.modules.sync_server.utils import SiteAlreadyPresentError
class TestSiteOperation(ModuleUnitTest):
REPRESENTATION_ID = "60e578d0c987036c6a7b741d"
TEST_FILES = [("1eCwPljuJeOI8A3aisfOIBKKjcmIycTEt",
TEST_FILES = [("1FHE70Hi7y05LLT_1O3Y6jGxwZGXKV9zX",
"test_site_operations.zip", '')]
@pytest.fixture(scope="module")
@ -71,7 +74,7 @@ class TestSiteOperation(ModuleUnitTest):
@pytest.mark.usefixtures("setup_sync_server_module")
def test_add_site_again(self, dbcon, setup_sync_server_module):
"""Depends on test_add_site, must throw exception."""
with pytest.raises(ValueError):
with pytest.raises(SiteAlreadyPresentError):
setup_sync_server_module.add_site(self.TEST_PROJECT_NAME,
self.REPRESENTATION_ID,
site_name='test_site')

tools/docker_build.ps1 Normal file
View file

@ -0,0 +1,98 @@
$current_dir = Get-Location
$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
$repo_root = (Get-Item $script_dir).parent.FullName
$env:PSModulePath = $env:PSModulePath + ";$($repo_root)\tools\modules\powershell"
function Exit-WithCode($exitcode) {
# Only exit this host process if it's a child of another PowerShell parent process...
$parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId
$parentProcName = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$parentPID" | Select-Object -Property Name).Name
if ('powershell.exe' -eq $parentProcName) { $host.SetShouldExit($exitcode) }
exit $exitcode
}
function Restore-Cwd() {
$tmp_current_dir = Get-Location
if ("$tmp_current_dir" -ne "$current_dir") {
Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray
Set-Location -Path $current_dir
}
}
function Get-Container {
if (-not (Test-Path -PathType Leaf -Path "$($repo_root)\build\docker-image.id")) {
Write-Color -Text "!!! ", "Docker command failed, cannot find image id." -Color Red, Yellow
Restore-Cwd
Exit-WithCode 1
}
$id = Get-Content "$($repo_root)\build\docker-image.id"
Write-Color -Text ">>> ", "Creating container from image id ", "[", $id, "]" -Color Green, Gray, White, Cyan, White
$cid = docker create $id bash
if ($LASTEXITCODE -ne 0) {
Write-Color -Text "!!! ", "Cannot create container." -Color Red, Yellow
Restore-Cwd
Exit-WithCode 1
}
return $cid
}
function Change-Cwd() {
Set-Location -Path $repo_root
}
function New-DockerBuild {
$version_file = Get-Content -Path "$($repo_root)\openpype\version.py"
$result = [regex]::Matches($version_file, '__version__ = "(?<version>\d+\.\d+.\d+.*)"')
$openpype_version = $result[0].Groups['version'].Value
$startTime = [int][double]::Parse((Get-Date -UFormat %s))
Write-Color -Text ">>> ", "Building OpenPype using Docker ..." -Color Green, Gray, White
$variant = $args[0]
if ($variant.Length -eq 0) {
$dockerfile = "$($repo_root)\Dockerfile"
} else {
$dockerfile = "$( $repo_root )\Dockerfile.$variant"
}
if (-not (Test-Path -PathType Leaf -Path $dockerfile)) {
Write-Color -Text "!!! ", "Dockerfile for specifed platform ", "[", $variant, "]", "doesn't exist." -Color Red, Yellow, Cyan, White, Cyan, Yellow
Restore-Cwd
Exit-WithCode 1
}
Write-Color -Text ">>> ", "Using Dockerfile for ", "[ ", $variant, " ]" -Color Green, Gray, White, Cyan, White
$build_dir = "$($repo_root)\build"
if (-not(Test-Path $build_dir)) {
New-Item -ItemType Directory -Path $build_dir
}
Write-Color -Text "--- ", "Cleaning build directory ..." -Color Yellow, Gray
try {
Remove-Item -Recurse -Force "$($build_dir)\*"
} catch {
Write-Color -Text "!!! ", "Cannot clean build directory, possibly because process is using it." -Color Red, Gray
Write-Color -Text $_.Exception.Message -Color Red
Exit-WithCode 1
}
Write-Color -Text ">>> ", "Running Docker build ..." -Color Green, Gray, White
docker build --pull --iidfile $repo_root/build/docker-image.id --build-arg BUILD_DATE=$(Get-Date -UFormat %Y-%m-%dT%H:%M:%SZ) --build-arg VERSION=$openpype_version -t pypeclub/openpype:$openpype_version -f $dockerfile .
if ($LASTEXITCODE -ne 0) {
Write-Color -Text "!!! ", "Docker command failed.", $LASTEXITCODE -Color Red, Yellow, Red
Restore-Cwd
Exit-WithCode 1
}
Write-Color -Text ">>> ", "Copying build from container ..." -Color Green, Gray, White
$cid = Get-Container
docker cp "$($cid):/opt/openpype/build/exe.linux-x86_64-3.9" "$($repo_root)/build"
docker cp "$($cid):/opt/openpype/build/build.log" "$($repo_root)/build"
$endTime = [int][double]::Parse((Get-Date -UFormat %s))
try {
New-BurntToastNotification -AppLogo "$repo_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $( $endTime - $startTime ) secs. You will find OpenPype and build log in build directory."
} catch {}
Write-Color -Text "*** ", "All done in ", $($endTime - $startTime), " secs. You will find OpenPype and build log in ", "'.\build'", " directory." -Color Green, Gray, White, Gray, White, Gray
}
Change-Cwd
New-DockerBuild $ARGS