Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Merge branch 'develop' into enhancement/AY-4085_Houdini-adding-model-family

Commit 1fae389ebc: 13 changed files with 52 additions and 48 deletions

@@ -33,7 +33,7 @@ def load_scripts(paths):
         if register:
             try:
                 register()
-            except:
+            except:  # noqa E722
                 traceback.print_exc()
         else:
             print("\nWarning! '%s' has no register function, "

@@ -45,7 +45,7 @@ def load_scripts(paths):
         if unregister:
             try:
                 unregister()
-            except:
+            except:  # noqa E722
                 traceback.print_exc()

     def test_reload(mod):

@@ -57,7 +57,7 @@ def load_scripts(paths):

        try:
            return importlib.reload(mod)
-        except:
+        except:  # noqa E722
            traceback.print_exc()

    def test_register(mod):

@@ -6,12 +6,9 @@ import json
 from typing import Any, Dict, Union

 import six
-import ayon_api

 from ayon_core.pipeline import (
     get_current_project_name,
-    get_current_folder_path,
-    get_current_task_name,
     colorspace
 )
 from ayon_core.settings import get_project_settings

@@ -6,7 +6,6 @@ from ayon_core.lib import (
     BoolDef,
     NumberDef,
 )
-from ayon_core.pipeline import CreatedInstance


 def _get_animation_attr_defs(cls):

@@ -35,8 +35,12 @@ class ImageCreator(Creator):
         create_empty_group = False

         stub = api.stub()  # only after PS is up
-        top_level_selected_items = stub.get_selected_layers()
         if pre_create_data.get("use_selection"):
+            try:
+                top_level_selected_items = stub.get_selected_layers()
+            except ValueError:
+                raise CreatorError("Cannot group locked Background layer!")
+
             only_single_item_selected = len(top_level_selected_items) == 1
             if (
                 only_single_item_selected or

@@ -50,11 +54,12 @@ class ImageCreator(Creator):
                 group = stub.group_selected_layers(product_name_from_ui)
                 groups_to_create.append(group)
             else:
-                stub.select_layers(stub.get_layers())
                 try:
+                    stub.select_layers(stub.get_layers())
                     group = stub.group_selected_layers(product_name_from_ui)
-                except:
+                except ValueError:
                     raise CreatorError("Cannot group locked Background layer!")

                 groups_to_create.append(group)

         # create empty group if nothing selected

@@ -1,5 +1,3 @@
-import os
-
 import pyblish.api
 import pyblish.util

@@ -139,6 +139,7 @@ from .path_tools import (
 )

 from .ayon_info import (
+    is_in_ayon_launcher_process,
     is_running_from_build,
     is_using_ayon_console,
     is_staging_enabled,

@@ -248,6 +249,7 @@ __all__ = [

     "Logger",

+    "is_in_ayon_launcher_process",
     "is_running_from_build",
     "is_using_ayon_console",
     "is_staging_enabled",

@@ -1,4 +1,5 @@
 import os
+import sys
 import json
 import datetime
 import platform

@@ -25,6 +26,18 @@ def get_ayon_launcher_version():
     return content["__version__"]


+def is_in_ayon_launcher_process():
+    """Determine if current process is running from AYON launcher.
+
+    Returns:
+        bool: True if running from AYON launcher.
+    """
+
+    ayon_executable_path = os.path.normpath(os.environ["AYON_EXECUTABLE"])
+    executable_path = os.path.normpath(sys.executable)
+    return ayon_executable_path == executable_path
+
+
 def is_running_from_build():
     """Determine if current process is running from build or code.

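The new helper compares the normalized AYON_EXECUTABLE path with sys.executable, so it only returns True when the interpreter running the code is the launcher binary itself. A minimal usage sketch, not part of the commit, assuming the re-export in ayon_core.lib shown in the __init__ hunks above; the guard for a missing environment variable is illustrative only:

import os

from ayon_core.lib import is_in_ayon_launcher_process

# Illustrative guard: the helper reads os.environ["AYON_EXECUTABLE"] and would
# raise KeyError in a plain interpreter where that variable is not set.
if "AYON_EXECUTABLE" in os.environ and is_in_ayon_launcher_process():
    print("Running inside the AYON launcher executable")
else:
    print("Running from a regular Python interpreter or without the launcher env")
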
@@ -467,8 +467,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,

         # Inject deadline url to instances to query DL for job id for overrides
         for inst in instances:
-            if not "deadline" in inst:
-                inst["deadline"] = {}
             inst["deadline"] = instance.data["deadline"]

         # publish job file

@@ -72,7 +72,7 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
             auth=auth,
             log=self.log)
         # some DL return "none" as a pool name
-        if not "none" in pools:
+        if "none" not in pools:
             pools.append("none")
         self.log.info("Available pools: {}".format(pools))
         self.pools_per_url[deadline_url] = pools

@@ -2053,7 +2053,7 @@ class CreateContext:
                 exc_info = sys.exc_info()
                 self.log.warning(error_message.format(identifier, exc_info[1]))

-            except:
+            except:  # noqa: E722
                 add_traceback = True
                 exc_info = sys.exc_info()
                 self.log.warning(

@@ -2163,7 +2163,7 @@ class CreateContext:
                 exc_info = sys.exc_info()
                 self.log.warning(error_message.format(identifier, exc_info[1]))

-            except:
+            except:  # noqa: E722
                 failed = True
                 add_traceback = True
                 exc_info = sys.exc_info()

@@ -2197,7 +2197,7 @@ class CreateContext:
             try:
                 convertor.find_instances()

-            except:
+            except:  # noqa: E722
                 failed_info.append(
                     prepare_failed_convertor_operation_info(
                         convertor.identifier, sys.exc_info()

@@ -2373,7 +2373,7 @@ class CreateContext:
                 exc_info = sys.exc_info()
                 self.log.warning(error_message.format(identifier, exc_info[1]))

-            except:
+            except:  # noqa: E722
                 failed = True
                 add_traceback = True
                 exc_info = sys.exc_info()

@@ -2440,7 +2440,7 @@ class CreateContext:
                     error_message.format(identifier, exc_info[1])
                 )

-            except:
+            except:  # noqa: E722
                 failed = True
                 add_traceback = True
                 exc_info = sys.exc_info()

@@ -2546,7 +2546,7 @@ class CreateContext:
             try:
                 self.run_convertor(convertor_identifier)

-            except:
+            except:  # noqa: E722
                 failed_info.append(
                     prepare_failed_convertor_operation_info(
                         convertor_identifier, sys.exc_info()

@@ -73,8 +73,8 @@ def get_folder_template_data(folder_entity, project_name):
         - 'parent' - direct parent name, project name used if is under
             project

-    Required document fields:
-        Folder: 'path' -> Plan to require: 'folderType'
+    Required entity fields:
+        Folder: 'path', 'folderType'

     Args:
         folder_entity (Dict[str, Any]): Folder entity.

@@ -101,6 +101,8 @@ def get_folder_template_data(folder_entity, project_name):
     return {
         "folder": {
             "name": folder_name,
+            "type": folder_entity["folderType"],
+            "path": path,
         },
         "asset": folder_name,
         "hierarchy": hierarchy,

@@ -33,6 +33,7 @@ import collections
 import pyblish.api
 import ayon_api

+from ayon_core.pipeline.template_data import get_folder_template_data
 from ayon_core.pipeline.version_start import get_versioning_start


@@ -383,24 +384,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
         # - 'folder', 'hierarchy', 'parent', 'folder'
         folder_entity = instance.data.get("folderEntity")
         if folder_entity:
-            folder_name = folder_entity["name"]
-            folder_path = folder_entity["path"]
-            hierarchy_parts = folder_path.split("/")
-            hierarchy_parts.pop(0)
-            hierarchy_parts.pop(-1)
-            parent_name = project_entity["name"]
-            if hierarchy_parts:
-                parent_name = hierarchy_parts[-1]
-
-            hierarchy = "/".join(hierarchy_parts)
-            anatomy_data.update({
-                "asset": folder_name,
-                "hierarchy": hierarchy,
-                "parent": parent_name,
-                "folder": {
-                    "name": folder_name,
-                },
-            })
+            folder_data = get_folder_template_data(
+                folder_entity,
+                project_entity["name"]
+            )
+            anatomy_data.update(folder_data)
             return

         if instance.data.get("newAssetPublishing"):

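With this change the plugin no longer splits folder_entity["path"] by hand; get_folder_template_data builds the same keys, plus the folder type and path added in the hunks above. Roughly, for a folder entity at "/assets/characters/hero" the helper would return something like the sketch below; the concrete values are illustrative only, not taken from the commit:

folder_data = {
    "folder": {
        "name": "hero",
        "type": "Asset",          # folder_entity["folderType"]
        "path": "/assets/characters/hero",
    },
    "asset": "hero",              # legacy key mirroring the folder name
    "hierarchy": "assets/characters",
    "parent": "characters",       # project name would be used for a top-level folder
}
anatomy_data.update(folder_data)
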
@@ -418,6 +406,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
                 "parent": parent_name,
                 "folder": {
                     "name": folder_name,
+                    "path": instance.data["folderPath"],
+                    # TODO get folder type from hierarchy
+                    #   Using 'Shot' is current default behavior of editorial
+                    #   (or 'newAssetPublishing') publishing.
+                    "type": "Shot",
                 },
             })

@@ -104,14 +104,11 @@ class WebServerTool:
        again. In that case, use existing running webserver.
        Check here is easier than capturing exception from thread.
        """
-        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        result = True
-        try:
-            sock.bind((host_name, port))
-            result = False
-        except:
-            print("Port is in use")
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con:
+            result = con.connect_ex((host_name, port)) == 0

+        if result:
+            print(f"Port {port} is already in use")
         return result

     def call(self, func):
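
The rewritten check relies on socket.connect_ex, which returns 0 only when something already accepts connections on the address, so comparing the result to 0 answers "is the port taken" directly instead of inferring it from a failed bind. A standalone sketch of the same idea; the wrapper function name and the default host and port below are illustrative, not part of the change:

import socket

def port_in_use(host_name="localhost", port=8079):
    # connect_ex returns an error code instead of raising; 0 means a listener
    # accepted the connection, i.e. the port is already occupied.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con:
        return con.connect_ex((host_name, port)) == 0

if port_in_use():
    print("Port 8079 is already in use")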