Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
Merge remote-tracking branch 'origin/develop' into develop
Commit b765779212
42 changed files with 1852 additions and 468 deletions
@@ -40,7 +40,8 @@ from .lib import (
     get_version_from_path,
     get_last_version_from_path,
     modified_environ,
-    add_tool_to_environment
+    add_tool_to_environment,
+    get_latest_version
 )

 # Special naming case for subprocess since it's a built-in method.
@@ -85,5 +86,6 @@ __all__ = [
     "modified_environ",
     "add_tool_to_environment",

-    "subprocess"
+    "subprocess",
+    "get_latest_version"
 ]
@@ -158,6 +158,25 @@ class AExpectedFiles:
         """To be implemented by renderer class."""
         pass

+    def sanitize_camera_name(self, camera):
+        """Sanitize camera name.
+
+        Remove Maya-illegal characters from the camera name.
+
+        Args:
+            camera (str): Maya camera name.
+
+        Returns:
+            (str): Sanitized camera name.
+
+        Example:
+            >>> sanitize_camera_name('test:camera_01')
+            test_camera_01
+
+        """
+        sanitized = re.sub('[^0-9a-zA-Z_]+', '_', camera)
+        return sanitized
+
     def get_renderer_prefix(self):
         """Return prefix for specific renderer.
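The sanitizer above is a plain regex substitution, so its behavior can be checked outside Maya. A minimal standalone sketch of the same expression (the function is made module-level here purely for illustration):

```python
import re

def sanitize_camera_name(camera):
    # Collapse every run of characters outside [0-9a-zA-Z_] into a single
    # underscore, mirroring the method added to AExpectedFiles above.
    return re.sub('[^0-9a-zA-Z_]+', '_', camera)

print(sanitize_camera_name('test:camera_01'))   # test_camera_01
print(sanitize_camera_name('render cam|left'))  # render_cam_left
```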
@@ -252,7 +271,7 @@ class AExpectedFiles:
         mappings = (
             (R_SUBSTITUTE_SCENE_TOKEN, layer_data["sceneName"]),
             (R_SUBSTITUTE_LAYER_TOKEN, layer_data["layerName"]),
-            (R_SUBSTITUTE_CAMERA_TOKEN, cam),
+            (R_SUBSTITUTE_CAMERA_TOKEN, self.sanitize_camera_name(cam)),
             # this is required to remove unfilled aov token, for example
             # in Redshift
             (R_REMOVE_AOV_TOKEN, ""),
@@ -287,7 +306,8 @@ class AExpectedFiles:
         mappings = (
             (R_SUBSTITUTE_SCENE_TOKEN, layer_data["sceneName"]),
             (R_SUBSTITUTE_LAYER_TOKEN, layer_data["layerName"]),
-            (R_SUBSTITUTE_CAMERA_TOKEN, cam),
+            (R_SUBSTITUTE_CAMERA_TOKEN,
+             self.sanitize_camera_name(cam)),
             (R_SUBSTITUTE_AOV_TOKEN, aov[0]),
             (R_CLEAN_FRAME_TOKEN, ""),
             (R_CLEAN_EXT_TOKEN, ""),
@@ -314,7 +334,8 @@ class AExpectedFiles:
             # camera name to AOV to allow per camera AOVs.
             aov_name = aov[0]
             if len(layer_data["cameras"]) > 1:
-                aov_name = "{}_{}".format(aov[0], cam)
+                aov_name = "{}_{}".format(aov[0],
+                                          self.sanitize_camera_name(cam))

             aov_file_list[aov_name] = aov_files
             file_prefix = layer_data["filePrefix"]
@@ -1445,7 +1445,7 @@ class ExporterReview:
         anlib.reset_selection()
         ipn_orig = None
         for v in [n for n in nuke.allNodes()
-                  if "Viewer" in n.Class()]:
+                  if "Viewer" == n.Class()]:
             ip = v['input_process'].getValue()
             ipn = v['input_process_node'].getValue()
             if "VIEWER_INPUT" not in ipn and ip:
pype/hosts/nukestudio/tags.json (new file, 262 lines)
@@ -0,0 +1,262 @@
{
    "Hierarchy": {
        "editable": "1",
        "note": "{folder}/{sequence}/{shot}",
        "icon": {
            "path": "hierarchy.png"
        },
        "metadata": {
            "folder": "FOLDER_NAME",
            "shot": "{clip}",
            "track": "{track}",
            "sequence": "{sequence}",
            "episode": "EPISODE_NAME",
            "root": "{projectroot}"
        }
    },
    "Source Resolution": {
        "editable": "1",
        "note": "Use source resolution",
        "icon": {
            "path": "resolution.png"
        },
        "metadata": {
            "family": "resolution"
        }
    },
    "Retiming": {
        "editable": "1",
        "note": "Clip has retime or TimeWarp effects (or multiple effects stacked on the clip)",
        "icon": {
            "path": "retiming.png"
        },
        "metadata": {
            "family": "retiming",
            "marginIn": 1,
            "marginOut": 1
        }
    },
    "Frame start": {
        "editable": "1",
        "note": "Starting frame for comps. \n\n> Use `value` and either add a number or write `source` (if you want to preserve source frame numbering)",
        "icon": {
            "path": "icons:TagBackground.png"
        },
        "metadata": {
            "family": "frameStart",
            "value": "1001"
        }
    },
    "[Lenses]": {
        "Set lense here": {
            "editable": "1",
            "note": "Adjust parameters of your lens and then drop onto a clip. Remember! You can always overwrite on the clip",
            "icon": {
                "path": "lense.png"
            },
            "metadata": {
                "focalLengthMm": 57
            }
        }
    },
    "[Subsets]": {
        "Audio": {
            "editable": "1",
            "note": "Export with Audio",
            "icon": {
                "path": "volume.png"
            },
            "metadata": {
                "family": "audio",
                "subset": "main"
            }
        },
        "plateFg": {
            "editable": "1",
            "note": "Add to publish to \"foreground\" subset. Change the metadata subset name if the order number differs",
            "icon": {
                "path": "z_layer_fg.png"
            },
            "metadata": {
                "family": "plate",
                "subset": "Fg01"
            }
        },
        "plateBg": {
            "editable": "1",
            "note": "Add to publish to \"background\" subset. Change the metadata subset name if the order number differs",
            "icon": {
                "path": "z_layer_bg.png"
            },
            "metadata": {
                "family": "plate",
                "subset": "Bg01"
            }
        },
        "plateRef": {
            "editable": "1",
            "note": "Add to publish to \"reference\" subset.",
            "icon": {
                "path": "icons:Reference.png"
            },
            "metadata": {
                "family": "plate",
                "subset": "Ref"
            }
        },
        "plateMain": {
            "editable": "1",
            "note": "Add to publish to \"main\" subset.",
            "icon": {
                "path": "z_layer_main.png"
            },
            "metadata": {
                "family": "plate",
                "subset": "main"
            }
        },
        "plateProxy": {
            "editable": "1",
            "note": "Add to publish to \"proxy\" subset.",
            "icon": {
                "path": "z_layer_main.png"
            },
            "metadata": {
                "family": "plate",
                "subset": "proxy"
            }
        },
        "review": {
            "editable": "1",
            "note": "Upload to Ftrack as review component.",
            "icon": {
                "path": "review.png"
            },
            "metadata": {
                "family": "review",
                "track": "review"
            }
        }
    },
    "[Handles]": {
        "start: add 20 frames": {
            "editable": "1",
            "note": "Adds frames to the start of the selected clip",
            "icon": {
                "path": "3_add_handles_start.png"
            },
            "metadata": {
                "family": "handles",
                "value": "20",
                "args": "{'op':'add','where':'start'}"
            }
        },
        "start: add 10 frames": {
            "editable": "1",
            "note": "Adds frames to the start of the selected clip",
            "icon": {
                "path": "3_add_handles_start.png"
            },
            "metadata": {
                "family": "handles",
                "value": "10",
                "args": "{'op':'add','where':'start'}"
            }
        },
        "start: add 5 frames": {
            "editable": "1",
            "note": "Adds frames to the start of the selected clip",
            "icon": {
                "path": "3_add_handles_start.png"
            },
            "metadata": {
                "family": "handles",
                "value": "5",
                "args": "{'op':'add','where':'start'}"
            }
        },
        "start: add 0 frames": {
            "editable": "1",
            "note": "Adds frames to the start of the selected clip",
            "icon": {
                "path": "3_add_handles_start.png"
            },
            "metadata": {
                "family": "handles",
                "value": "0",
                "args": "{'op':'add','where':'start'}"
            }
        },
        "end: add 20 frames": {
            "editable": "1",
            "note": "Adds frames to the end of the selected clip",
            "icon": {
                "path": "1_add_handles_end.png"
            },
            "metadata": {
                "family": "handles",
                "value": "20",
                "args": "{'op':'add','where':'end'}"
            }
        },
        "end: add 10 frames": {
            "editable": "1",
            "note": "Adds frames to the end of the selected clip",
            "icon": {
                "path": "1_add_handles_end.png"
            },
            "metadata": {
                "family": "handles",
                "value": "10",
                "args": "{'op':'add','where':'end'}"
            }
        },
        "end: add 5 frames": {
            "editable": "1",
            "note": "Adds frames to the end of the selected clip",
            "icon": {
                "path": "1_add_handles_end.png"
            },
            "metadata": {
                "family": "handles",
                "value": "5",
                "args": "{'op':'add','where':'end'}"
            }
        },
        "end: add 0 frames": {
            "editable": "1",
            "note": "Adds frames to the end of the selected clip",
            "icon": {
                "path": "1_add_handles_end.png"
            },
            "metadata": {
                "family": "handles",
                "value": "0",
                "args": "{'op':'add','where':'end'}"
            }
        }
    },
    "NukeScript": {
        "editable": "1",
        "note": "Collects track items into Nuke scripts.",
        "icon": {
            "path": "icons:TagNuke.png"
        },
        "metadata": {
            "family": "nukescript",
            "subset": "main"
        }
    },
    "Comment": {
        "editable": "1",
        "note": "Comment on a shot.",
        "icon": {
            "path": "icons:TagComment.png"
        },
        "metadata": {
            "family": "comment",
            "subset": "main"
        }
    }
}
@@ -1,16 +1,22 @@
 import re
+import os
+import json
 import hiero

-from pype.api import (
-    config,
-    Logger
-)
+from pype.api import Logger
 from avalon import io

 log = Logger().get_logger(__name__, "nukestudio")


+def tag_data():
+    current_dir = os.path.dirname(__file__)
+    json_path = os.path.join(current_dir, "tags.json")
+    with open(json_path, "r") as json_stream:
+        data = json.load(json_stream)
+    return data
+
+
 def create_tag(key, value):
     """
     Creating Tag object.
@@ -58,13 +64,9 @@ def add_tags_from_presets():
         return

     log.debug("Setting default tags on project: {}".format(project.name()))

-    # get all presets
-    presets = config.get_presets()
-
-    # get nukestudio tag.json from presets
-    nks_pres = presets["nukestudio"]
-    nks_pres_tags = nks_pres.get("tags", None)
+    # get nukestudio tags.json
+    nks_pres_tags = tag_data()

     # Get project task types.
     tasks = io.find_one({"type": "project"})["config"]["tasks"]
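With tags.json shipped next to the module, `tag_data()` is a plain json load and `add_tags_from_presets()` no longer goes through `config.get_presets()`. A small sketch of how such a nested definition can be walked; treating keys wrapped in `[...]` as tag groups is an assumption read off the file's layout above:

```python
import json
import os

def tag_data(module_dir):
    # tags.json sits next to the module that loads it.
    with open(os.path.join(module_dir, "tags.json"), "r") as json_stream:
        return json.load(json_stream)

def iter_tags(data):
    # Yield (name, definition) pairs, flattening one level of
    # "[Group]" style sub-dictionaries (assumed grouping convention).
    for key, value in data.items():
        if key.startswith("[") and key.endswith("]"):
            for sub_key, sub_value in value.items():
                yield sub_key, sub_value
        else:
            yield key, value

for name, definition in iter_tags(tag_data(os.path.dirname(__file__))):
    print(name, definition.get("note", ""))
```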
pype/lib.py (45 lines changed)
@@ -520,14 +520,6 @@ def set_io_database():
     io.install()


-def get_all_avalon_projects():
-    db = get_avalon_database()
-    projects = []
-    for name in db.collection_names():
-        projects.append(db[name].find_one({'type': 'project'}))
-    return projects
-
-
 def filter_pyblish_plugins(plugins):
     """
     This serves as a plugin filter / modifier for pyblish. It will load plugin
@@ -1387,3 +1379,40 @@ def ffprobe_streams(path_to_file):
     popen_output = popen.communicate()[0]
     log.debug("FFprobe output: {}".format(popen_output))
     return json.loads(popen_output)["streams"]
+
+
+def get_latest_version(asset_name, subset_name):
+    """Retrieve the latest version for `asset_name` and `subset_name`.
+
+    Args:
+        asset_name (str): Name of asset.
+        subset_name (str): Name of subset.
+    """
+    # Get asset
+    asset_name = io.find_one(
+        {"type": "asset", "name": asset_name}, projection={"name": True}
+    )
+
+    subset = io.find_one(
+        {"type": "subset", "name": subset_name, "parent": asset_name["_id"]},
+        projection={"_id": True, "name": True},
+    )
+
+    # Check if subset actually exists.
+    assert subset, "No subsets found."
+
+    # Get version
+    version_projection = {
+        "name": True,
+        "parent": True,
+    }
+
+    version = io.find_one(
+        {"type": "version", "parent": subset["_id"]},
+        projection=version_projection,
+        sort=[("name", -1)],
+    )
+
+    assert version, "No version found, this is a bug"
+
+    return version
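A quick usage sketch for the new helper; the asset and subset names are placeholders, and `io` must already be installed against a project:

```python
from avalon import io
from pype.lib import get_latest_version

io.install()  # assumes AVALON_PROJECT etc. are set in the environment

# Placeholder names for illustration.
version = get_latest_version(asset_name="sh010", subset_name="renderMain")

# sort=[("name", -1)] in the query means the first match is the
# highest version number.
print(version["name"])
```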
@@ -3,8 +3,7 @@ import toml
 import time
 from pype.modules.ftrack.lib import AppAction
 from avalon import lib
-from pype.api import Logger
-from pype.lib import get_all_avalon_projects
+from pype.api import Logger, config

 log = Logger().get_logger(__name__)

@@ -49,17 +48,26 @@ def registerApp(app, session, plugins_presets):


 def register(session, plugins_presets={}):
-    # WARNING getting projects only helps to check connection to mongo
-    # - without it the `discover` of ftrack app actions takes ages
-    result = get_all_avalon_projects()
+    app_usages = (
+        config.get_presets()
+        .get("global", {})
+        .get("applications")
+    ) or {}

     apps = []

+    missing_app_names = []
     launchers_path = os.path.join(os.environ["PYPE_CONFIG"], "launchers")
     for file in os.listdir(launchers_path):
         filename, ext = os.path.splitext(file)
         if ext.lower() != ".toml":
             continue

         app_usage = app_usages.get(filename)
-        if not app_usage:
+        if app_usage is None:
             missing_app_names.append(filename)
             continue

         loaded_data = toml.load(os.path.join(launchers_path, file))
         app_data = {
             "name": filename,
@@ -67,7 +75,17 @@ def register(session, plugins_presets={}):
         }
         apps.append(app_data)

-    apps = sorted(apps, key=lambda x: x['name'])
+    if missing_app_names:
+        log.debug(
+            "Apps not defined in applications usage. ({})".format(
+                ", ".join((
+                    "\"{}\"".format(app_name)
+                    for app_name in missing_app_names
+                ))
+            )
+        )
+
+    apps = sorted(apps, key=lambda app: app["name"])
     app_counter = 0
     for app in apps:
         try:

@@ -76,7 +94,7 @@ def register(session, plugins_presets={}):
             time.sleep(0.1)
             app_counter += 1
         except Exception as exc:
-            log.exception(
+            log.warning(
                 "\"{}\" - not a proper App ({})".format(app['name'], str(exc)),
                 exc_info=True
             )
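The switch from `if not app_usage:` to `if app_usage is None:` changes which launchers are reported: only apps missing from the usage presets count as missing, while apps explicitly set to a falsy value are distinguishable from absent ones. A minimal sketch of that `dict.get` distinction (dictionary contents are illustrative):

```python
# Illustrative usage presets: maya enabled, nuke explicitly disabled,
# houdini not listed at all.
app_usages = {"maya_2020": True, "nuke_12": False}

for app_name in ("maya_2020", "nuke_12", "houdini_18"):
    app_usage = app_usages.get(app_name)
    if app_usage is None:
        print(app_name, "-> not defined in presets (reported as missing)")
    elif not app_usage:
        print(app_name, "-> explicitly disabled")
    else:
        print(app_name, "-> registered")
```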
@@ -1,7 +1,7 @@
 import collections
 import ftrack_api
 from pype.modules.ftrack.lib import BaseAction, statics_icon
-from pype.modules.ftrack.lib.avalon_sync import get_avalon_attr
+from pype.modules.ftrack.lib.avalon_sync import get_pype_attr


 class CleanHierarchicalAttrsAction(BaseAction):

@@ -48,7 +48,7 @@ class CleanHierarchicalAttrsAction(BaseAction):
         )
         entity_ids_joined = ", ".join(all_entities_ids)

-        attrs, hier_attrs = get_avalon_attr(session)
+        attrs, hier_attrs = get_pype_attr(session)

         for attr in hier_attrs:
             configuration_key = attr["key"]
@@ -1,99 +1,120 @@
 import os
 import collections
+import toml
 import json
 import arrow
 import ftrack_api
 from pype.modules.ftrack.lib import BaseAction, statics_icon
-from pype.modules.ftrack.lib.avalon_sync import CustAttrIdKey
+from pype.modules.ftrack.lib.avalon_sync import (
+    CUST_ATTR_ID_KEY, CUST_ATTR_GROUP, default_custom_attributes_definition
+)
 from pype.api import config

 """
 This action creates/updates custom attributes.
-- first part takes care of the avalon_mongo_id attribute
-- second part is based on json file in templates:
-    ~/PYPE-TEMPLATES/presets/ftrack/ftrack_custom_attributes.json
-- you can add Custom attributes based on these conditions
+## First part takes care of special attributes
+- `avalon_mongo_id` for storing Avalon MongoID
+- `applications` based on applications usages
+- `tools` based on tools usages
+
+## Second part is based on a json file in the ftrack module.
+File location: `~/pype/pype/modules/ftrack/ftrack_custom_attributes.json`
+
+Data in the json file is a nested dictionary. Keys on the first dictionary
+level represent the Ftrack entity type (task, show, assetversion, user,
+list, asset) and the dictionary value defines the attribute.
+
+There is a special key for hierarchical attributes: `is_hierarchical`.
+
+Entity type `task` requires the task object type (Folder, Shot, Sequence,
+Task, Library, Milestone, Episode, Asset Build, etc.) on the second
+dictionary level; a task's attributes are nested one level deeper.

 *** Not Changeable *********************************************************

+group (string)
+    - name of group
+    - based on attribute `pype.modules.ftrack.lib.CUST_ATTR_GROUP`
+    - "pype" by default
+
 *** Required ***************************************************************

 label (string)
-  - label that will show in ftrack
+    - label that will show in ftrack

 key (string)
-  - must contain only chars [a-z0-9_]
+    - must contain only chars [a-z0-9_]

 type (string)
-  - type of custom attribute
-  - possibilities: text, boolean, date, enumerator, dynamic enumerator, number
+    - type of custom attribute
+    - possibilities:
+        text, boolean, date, enumerator, dynamic enumerator, number

 *** Required with conditions ***********************************************

 entity_type (string)
+    - if 'is_hierarchical' is set to False
     - type of entity
     - possibilities: task, show, assetversion, user, list, asset

 config (dictionary)
-    - for each entity type different requirements and possibilities:
-        - enumerator: multiSelect = True/False(default: False)
-                      data = {key_1:value_1,key_2:value_2,..,key_n:value_n}
-            - 'data' is a required value with enumerator
-            - 'key' must contain only chars [a-z0-9_]
+    - for each attribute type different requirements and possibilities:
+        - enumerator:
+            multiSelect = True/False(default: False)
+            data = {key_1:value_1,key_2:value_2,..,key_n:value_n}
+            - 'data' is a required value with enumerator
+            - 'key' must contain only chars [a-z0-9_]

-        - number: isdecimal = True/False(default: False)
+        - number:
+            isdecimal = True/False(default: False)

-        - text: markdown = True/False(default: False)
+        - text:
+            markdown = True/False(default: False)

 object_type (string)
     - IF ENTITY_TYPE is set to 'task'
     - default possibilities: Folder, Shot, Sequence, Task, Library,
        Milestone, Episode, Asset Build,...

-*** Optional ***************************************************************
+*** Presetable keys **********************************************************

 write_security_roles/read_security_roles (array of strings)
     - default: ["ALL"]
     - strings should be role names (e.g.: ["API", "Administrator"])
     - if set to ["ALL"] - all roles will be available
     - if first is 'except' - roles will be set to all except roles in array
     - Warning: Be careful with except - roles can differ by company
     - example:
         write_security_roles = ["except", "User"]
-        read_security_roles = ["ALL"]
-        - User is unable to write but can read
-
-group (string)
-    - default: None
-    - name of group
+        read_security_roles = ["ALL"]  # (User can only read)

 default
     - default: None
     - sets default value for custom attribute:
         - text -> string
         - number -> integer
         - enumerator -> array with string of key/s
         - boolean -> bool true/false
         - date -> string in format: 'YYYY.MM.DD' or 'YYYY.MM.DD HH:mm:ss'
             - example: "2018.12.24" / "2018.1.1 6:0:0"
         - dynamic enumerator -> DOESN'T HAVE A DEFAULT VALUE!!!

 is_hierarchical (bool)
+    - default: False
     - will set hierarchical attribute
-    - False by default

-EXAMPLE:
-{
+Example:
+```
     "show": {
         "avalon_auto_sync": {
-            "label": "Avalon auto-sync",
-            "key": "avalon_auto_sync",
-            "type": "boolean",
-            "entity_type": "show",
-            "group": "avalon",
-            "default": false,
-            "write_security_role": ["API","Administrator"],
-            "read_security_role": ["API","Administrator"]
+            "label": "Avalon auto-sync",
+            "type": "boolean",
+            "write_security_role": ["API", "Administrator"],
+            "read_security_role": ["API", "Administrator"]
         }
     },
+    "is_hierarchical": {
+        "fps": {
+            "label": "FPS",
+            "type": "number",
+            "config": {"isdecimal": true}
+        }
+    },
+    "task": {
+        "library": {
+            "my_attr_name": {
+                "label": "My Attr",
+                "type": "number"
+            }
+        }
+    }
+```
 """
@@ -115,11 +136,15 @@ class CustomAttributes(BaseAction):
     role_list = ['Pypeclub', 'Administrator']
     icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")

-    required_keys = ['key', 'label', 'type']
-    type_posibilities = [
-        'text', 'boolean', 'date', 'enumerator',
-        'dynamic enumerator', 'number'
-    ]
+    required_keys = ("key", "label", "type")
+
+    presetable_keys = ("default", "write_security_role", "read_security_role")
+    hierarchical_key = "is_hierarchical"
+
+    type_posibilities = (
+        "text", "boolean", "date", "enumerator",
+        "dynamic enumerator", "number"
+    )

     def discover(self, session, entities, event):
         '''
@@ -141,21 +166,24 @@ class CustomAttributes(BaseAction):
             })
         })
         session.commit()

         try:
+            self.prepare_global_data(session)
             self.avalon_mongo_id_attributes(session, event)
-            self.custom_attributes_from_file(session, event)
+            self.applications_attribute(event)
+            self.tools_attribute(event)
+            self.intent_attribute(event)
+            self.custom_attributes_from_file(event)

             job['status'] = 'done'
             session.commit()

-        except Exception as exc:
+        except Exception:
             session.rollback()
-            job['status'] = 'failed'
+            job["status"] = "failed"
             session.commit()
             self.log.error(
-                'Creating custom attributes failed ({})'.format(exc),
-                exc_info=True
+                "Creating custom attributes failed ({})", exc_info=True
             )

         return True
@@ -182,20 +210,39 @@ class CustomAttributes(BaseAction):

         self.groups = {}

+        self.presets = config.get_presets()
+        self.attrs_presets = self.prepare_attribute_pressets()
+
+    def prepare_attribute_pressets(self):
+        output = {}
+
+        attr_presets = (
+            self.presets.get("ftrack", {}).get("ftrack_custom_attributes")
+        ) or {}
+        for entity_type, preset in attr_presets.items():
+            # Lower entity type
+            entity_type = entity_type.lower()
+            # Just store if entity type is not "task"
+            if entity_type != "task":
+                output[entity_type] = preset
+                continue
+
+            # Prepare empty dictionary for entity type if not set yet
+            if entity_type not in output:
+                output[entity_type] = {}
+
+            # Store presets per lowered object type
+            for obj_type, _preset in preset.items():
+                output[entity_type][obj_type.lower()] = _preset
+
+        return output
+
     def avalon_mongo_id_attributes(self, session, event):
-        self.create_hierarchical_mongo_attr(session, event)

         hierarchical_attr, object_type_attrs = (
             self.mongo_id_custom_attributes(session)
         )

+        if hierarchical_attr is None:
+            self.create_hierarchical_mongo_attr(session)
+            hierarchical_attr, object_type_attrs = (
+                self.mongo_id_custom_attributes(session)
+            )
+
+        if hierarchical_attr is None:
+            return
+
         if object_type_attrs:
             self.convert_mongo_id_to_hierarchical(
                 hierarchical_attr, object_type_attrs, session, event
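The preset lookup normalizes entity types to lowercase and nests task presets one level deeper by object type. A small illustration of the resulting shape the method builds (preset keys and values here are placeholders):

```python
# Illustrative presets as they might come from
# config.get_presets()["ftrack"]["ftrack_custom_attributes"].
attr_presets = {
    "Show": {"fps": {"default": 25}},
    "Task": {
        "Shot": {"frameStart": {"default": 1001}},
    },
}

output = {}
for entity_type, preset in attr_presets.items():
    entity_type = entity_type.lower()
    if entity_type != "task":
        output[entity_type] = preset
        continue
    # Task presets are keyed by lowered object type one level deeper.
    output.setdefault(entity_type, {})
    for obj_type, _preset in preset.items():
        output[entity_type][obj_type.lower()] = _preset

# output == {"show": {"fps": {"default": 25}},
#            "task": {"shot": {"frameStart": {"default": 1001}}}}
```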
@@ -206,7 +253,7 @@ class CustomAttributes(BaseAction):
             "select id, entity_type, object_type_id, is_hierarchical, default"
             " from CustomAttributeConfiguration"
             " where key = \"{}\""
-        ).format(CustAttrIdKey)
+        ).format(CUST_ATTR_ID_KEY)

         mongo_id_avalon_attr = session.query(cust_attrs_query).all()
         heirarchical_attr = None
@@ -220,32 +267,22 @@ class CustomAttributes(BaseAction):

         return heirarchical_attr, object_type_attrs

-    def create_hierarchical_mongo_attr(self, session):
-        # Attribute Name and Label
-        cust_attr_label = "Avalon/Mongo ID"
-
+    def create_hierarchical_mongo_attr(self, session, event):
         # Set security roles for attribute
-        role_list = ("API", "Administrator", "Pypeclub")
-        roles = self.get_security_roles(role_list)
-        # Set Text type of Attribute
-        custom_attribute_type = self.types_per_name["text"]
-        # Set group to 'avalon'
-        group = self.get_group("avalon")
-
+        default_role_list = ("API", "Administrator", "Pypeclub")
         data = {
-            "key": CustAttrIdKey,
-            "label": cust_attr_label,
-            "type": custom_attribute_type,
+            "key": CUST_ATTR_ID_KEY,
+            "label": "Avalon/Mongo ID",
+            "type": "text",
             "default": "",
-            "write_security_roles": roles,
-            "read_security_roles": roles,
-            "group": group,
+            "write_security_roles": default_role_list,
+            "read_security_roles": default_role_list,
+            "group": CUST_ATTR_GROUP,
             "is_hierarchical": True,
             "entity_type": "show",
-            "config": json.dumps({"markdown": False})
+            "config": {"markdown": False}
         }

-        self.process_attribute(data)
+        self.process_attr_data(data, event)

     def convert_mongo_id_to_hierarchical(
         self, hierarchical_attr, object_type_attrs, session, event
@@ -335,91 +372,253 @@ class CustomAttributes(BaseAction):
                 exc_info=True
             )

-    def custom_attributes_from_file(self, session, event):
-        presets = config.get_presets()['ftrack']['ftrack_custom_attributes']
+    def application_definitions(self):
+        app_usages = self.presets.get("global", {}).get("applications") or {}

-        for cust_attr_data in presets:
-            cust_attr_name = cust_attr_data.get(
-                'label',
-                cust_attr_data.get('key')
-            )
+        app_definitions = []
+        launchers_path = os.path.join(os.environ["PYPE_CONFIG"], "launchers")
+
+        missing_app_names = []
+        for file in os.listdir(launchers_path):
+            app_name, ext = os.path.splitext(file)
+            if ext.lower() != ".toml":
+                continue
+
+            if not app_usages.get(app_name):
+                missing_app_names.append(app_name)
+                continue
+
+            loaded_data = toml.load(os.path.join(launchers_path, file))
+
+            ftrack_label = loaded_data.get("ftrack_label")
+            if ftrack_label:
+                parts = app_name.split("_")
+                if len(parts) > 1:
+                    ftrack_label = " ".join((ftrack_label, parts[-1]))
+            else:
+                ftrack_label = loaded_data.get("label", app_name)
+
+            app_definitions.append({app_name: ftrack_label})
+
+        if missing_app_names:
+            self.log.warning(
+                "Apps not defined in applications usage. ({})".format(
+                    ", ".join((
+                        "\"{}\"".format(app_name)
+                        for app_name in missing_app_names
+                    ))
+                )
+            )
-            try:
-                data = {}
-                # Get key, label, type
-                data.update(self.get_required(cust_attr_data))
-                # Get hierachical/ entity_type/ object_id
-                data.update(self.get_entity_type(cust_attr_data))
-                # Get group, default, security roles
-                data.update(self.get_optional(cust_attr_data))
-                # Process data
-                self.process_attribute(data)
+        return app_definitions

-            except CustAttrException as cae:
-                if cust_attr_name:
-                    msg = 'Custom attribute error "{}" - {}'.format(
-                        cust_attr_name, str(cae)
-                    )
-                else:
-                    msg = 'Custom attribute error - {}'.format(str(cae))
-                self.log.warning(msg, exc_info=True)
-                self.show_message(event, msg)
+    def applications_attribute(self, event):
+        applications_custom_attr_data = {
+            "label": "Applications",
+            "key": "applications",
+            "type": "enumerator",
+            "entity_type": "show",
+            "group": CUST_ATTR_GROUP,
+            "config": {
+                "multiselect": True,
+                "data": self.application_definitions()
+            }
+        }
+        self.process_attr_data(applications_custom_attr_data, event)

-        return True
+    def tools_attribute(self, event):
+        tool_usages = self.presets.get("global", {}).get("tools") or {}
+        tools_data = []
+        for tool_name, usage in tool_usages.items():
+            if usage:
+                tools_data.append({tool_name: tool_name})
+
+        tools_custom_attr_data = {
+            "label": "Tools",
+            "key": "tools_env",
+            "type": "enumerator",
+            "is_hierarchical": True,
+            "group": CUST_ATTR_GROUP,
+            "config": {
+                "multiselect": True,
+                "data": tools_data
+            }
+        }
+        self.process_attr_data(tools_custom_attr_data, event)
+
+    def intent_attribute(self, event):
+        intent_key_values = (
+            self.presets
+            .get("global", {})
+            .get("intent", {})
+            .get("items", {})
+        ) or {}
+
+        intent_values = []
+        for key, label in intent_key_values.items():
+            if not key or not label:
+                self.log.info((
+                    "Skipping intent row: {{\"{}\": \"{}\"}}"
+                    " because of empty key or label."
+                ).format(key, label))
+                continue
+
+            intent_values.append({key: label})
+
+        if not intent_values:
+            return
+
+        intent_custom_attr_data = {
+            "label": "Intent",
+            "key": "intent",
+            "type": "enumerator",
+            "entity_type": "assetversion",
+            "group": CUST_ATTR_GROUP,
+            "config": {
+                "multiselect": False,
+                "data": intent_values
+            }
+        }
+        self.process_attr_data(intent_custom_attr_data, event)
+
+    def custom_attributes_from_file(self, event):
+        # Load json with custom attributes configurations
+        cust_attr_def = default_custom_attributes_definition()
+        attrs_data = []
+
+        # Prepare data of hierarchical attributes
+        hierarchical_attrs = cust_attr_def.pop(self.hierarchical_key, {})
+        for key, cust_attr_data in hierarchical_attrs.items():
+            cust_attr_data["key"] = key
+            cust_attr_data["is_hierarchical"] = True
+            attrs_data.append(cust_attr_data)
+
+        # Prepare data of entity specific attributes
+        for entity_type, cust_attr_datas in cust_attr_def.items():
+            if entity_type.lower() != "task":
+                for key, cust_attr_data in cust_attr_datas.items():
+                    cust_attr_data["key"] = key
+                    cust_attr_data["entity_type"] = entity_type
+                    attrs_data.append(cust_attr_data)
+                continue
+
+            # Task should have nested level for object type
+            for object_type, _cust_attr_datas in cust_attr_datas.items():
+                for key, cust_attr_data in _cust_attr_datas.items():
+                    cust_attr_data["key"] = key
+                    cust_attr_data["entity_type"] = entity_type
+                    cust_attr_data["object_type"] = object_type
+                    attrs_data.append(cust_attr_data)
+
+        # Process prepared data
+        for cust_attr_data in attrs_data:
+            # Add group
+            cust_attr_data["group"] = CUST_ATTR_GROUP
+            self.process_attr_data(cust_attr_data, event)
+
+    def presets_for_attr_data(self, attr_data):
+        output = {}
+
+        attr_key = attr_data["key"]
+        if attr_data.get("is_hierarchical"):
+            entity_key = self.hierarchical_key
+        else:
+            entity_key = attr_data["entity_type"]
+
+        entity_presets = self.attrs_presets.get(entity_key) or {}
+        if entity_key.lower() == "task":
+            object_type = attr_data["object_type"]
+            entity_presets = entity_presets.get(object_type.lower()) or {}
+
+        key_presets = entity_presets.get(attr_key) or {}
+
+        for key, value in key_presets.items():
+            if key in self.presetable_keys and value:
+                output[key] = value
+        return output
+
+    def process_attr_data(self, cust_attr_data, event):
+        attr_presets = self.presets_for_attr_data(cust_attr_data)
+        cust_attr_data.update(attr_presets)
+
+        try:
+            data = {}
+            # Get key, label, type
+            data.update(self.get_required(cust_attr_data))
+            # Get hierarchical / entity_type / object_id
+            data.update(self.get_entity_type(cust_attr_data))
+            # Get group, default, security roles
+            data.update(self.get_optional(cust_attr_data))
+            # Process data
+            self.process_attribute(data)
+
+        except CustAttrException as cae:
+            cust_attr_name = cust_attr_data.get("label", cust_attr_data["key"])
+
+            if cust_attr_name:
+                msg = 'Custom attribute error "{}" - {}'.format(
+                    cust_attr_name, str(cae)
+                )
+            else:
+                msg = 'Custom attribute error - {}'.format(str(cae))
+            self.log.warning(msg, exc_info=True)
+            self.show_message(event, msg)

     def process_attribute(self, data):
-        existing_atr = self.session.query('CustomAttributeConfiguration').all()
+        existing_attrs = self.session.query(
+            "CustomAttributeConfiguration"
+        ).all()
         matching = []
-        for attr in existing_atr:
+        for attr in existing_attrs:
             if (
-                attr['key'] != data['key'] or
-                attr['type']['name'] != data['type']['name']
+                attr["key"] != data["key"] or
+                attr["type"]["name"] != data["type"]["name"]
             ):
                 continue

-            if data.get('is_hierarchical', False) is True:
-                if attr['is_hierarchical'] is True:
+            if data.get("is_hierarchical") is True:
+                if attr["is_hierarchical"] is True:
                     matching.append(attr)
-            elif 'object_type_id' in data:
+            elif "object_type_id" in data:
                 if (
-                    attr['entity_type'] == data['entity_type'] and
-                    attr['object_type_id'] == data['object_type_id']
+                    attr["entity_type"] == data["entity_type"] and
+                    attr["object_type_id"] == data["object_type_id"]
                 ):
                     matching.append(attr)
             else:
-                if attr['entity_type'] == data['entity_type']:
+                if attr["entity_type"] == data["entity_type"]:
                     matching.append(attr)

         if len(matching) == 0:
-            self.session.create('CustomAttributeConfiguration', data)
+            self.session.create("CustomAttributeConfiguration", data)
             self.session.commit()
             self.log.debug(
-                '{}: "{}" created'.format(self.label, data['label'])
+                "Custom attribute \"{}\" created".format(data["label"])
             )

         elif len(matching) == 1:
             attr_update = matching[0]
             for key in data:
-                if (
-                    key not in [
-                        'is_hierarchical', 'entity_type', 'object_type_id'
-                    ]
+                if key not in (
+                    "is_hierarchical", "entity_type", "object_type_id"
                 ):
                     attr_update[key] = data[key]

-            self.log.debug(
-                '{}: "{}" updated'.format(self.label, data['label'])
-            )
             self.session.commit()
+            self.log.debug(
+                "Custom attribute \"{}\" updated".format(data["label"])
+            )

         else:
-            raise CustAttrException('Is duplicated')
+            raise CustAttrException((
+                "Custom attribute is duplicated. Key: \"{}\" Type: \"{}\""
+            ).format(data["key"], data["type"]["name"]))

     def get_required(self, attr):
         output = {}
         for key in self.required_keys:
             if key not in attr:
                 raise CustAttrException(
-                    'Key {} is required - please set'.format(key)
+                    "BUG: Key \"{}\" is required".format(key)
                 )

         if attr['type'].lower() not in self.type_posibilities:
@@ -593,17 +792,17 @@ class CustomAttributes(BaseAction):

     def get_optional(self, attr):
         output = {}
-        if 'group' in attr:
-            output['group'] = self.get_group(attr)
-        if 'default' in attr:
-            output['default'] = self.get_default(attr)
+        if "group" in attr:
+            output["group"] = self.get_group(attr)
+        if "default" in attr:
+            output["default"] = self.get_default(attr)

         roles_read = []
         roles_write = []
-        if 'read_security_roles' in output:
-            roles_read = attr['read_security_roles']
-        if 'read_security_roles' in output:
-            roles_write = attr['write_security_roles']
+        if "read_security_roles" in attr:
+            roles_read = attr["read_security_roles"]
+        if "write_security_roles" in attr:
+            roles_write = attr["write_security_roles"]
         output['read_security_roles'] = self.get_security_roles(roles_read)
         output['write_security_roles'] = self.get_security_roles(roles_write)
@@ -105,11 +105,34 @@ class DeleteOldVersions(BaseAction):
             "value": False
         })

+        items.append(self.splitter_item)
+
+        items.append({
+            "type": "label",
+            "value": (
+                "<i>This will <b>NOT</b> delete any files and only return the "
+                "total size of the files.</i>"
+            )
+        })
+        items.append({
+            "type": "boolean",
+            "name": "only_calculate",
+            "label": "Only calculate size of files.",
+            "value": False
+        })
+
         return {
             "items": items,
             "title": self.inteface_title
         }

+    def sizeof_fmt(self, num, suffix='B'):
+        for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
+            if abs(num) < 1024.0:
+                return "%3.1f%s%s" % (num, unit, suffix)
+            num /= 1024.0
+        return "%.1f%s%s" % (num, 'Yi', suffix)
+
     def launch(self, session, entities, event):
         values = event["data"].get("values")
         if not values:
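`sizeof_fmt` repeatedly divides by 1024 until the value drops below one unit, so reported sizes read naturally in binary prefixes. A quick standalone check of the same logic:

```python
def sizeof_fmt(num, suffix='B'):
    # Same walk through binary prefixes as the method above.
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if abs(num) < 1024.0:
            return "%3.1f%s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Yi', suffix)

print(sizeof_fmt(4096))        # 4.0KiB
print(sizeof_fmt(123456789))   # 117.7MiB
```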
@@ -117,6 +140,7 @@ class DeleteOldVersions(BaseAction):

         versions_count = int(values["last_versions_count"])
         force_to_remove = values["force_delete_publish_folder"]
+        only_calculate = values["only_calculate"]

         _val1 = "OFF"
         if force_to_remove:
@@ -318,10 +342,29 @@ class DeleteOldVersions(BaseAction):
                 "Folder does not exist. Deleting its files skipped: {}"
             ).format(paths_msg))

+        # Size of files.
+        size = 0
+
+        if only_calculate:
+            if force_to_remove:
+                size = self.delete_whole_dir_paths(
+                    dir_paths.values(), delete=False
+                )
+            else:
+                size = self.delete_only_repre_files(
+                    dir_paths, file_paths_by_dir, delete=False
+                )
+
+            msg = "Total size of files: " + self.sizeof_fmt(size)
+
+            self.log.warning(msg)
+
+            return {"success": True, "message": msg}
+
         if force_to_remove:
-            self.delete_whole_dir_paths(dir_paths.values())
+            size = self.delete_whole_dir_paths(dir_paths.values())
         else:
-            self.delete_only_repre_files(dir_paths, file_paths_by_dir)
+            size = self.delete_only_repre_files(dir_paths, file_paths_by_dir)

         mongo_changes_bulk = []
         for version in versions:
@@ -383,17 +426,31 @@ class DeleteOldVersions(BaseAction):
                 "message": msg
             }

-        return True
+        msg = "Total size of files deleted: " + self.sizeof_fmt(size)
+
+        self.log.warning(msg)
+
+        return {"success": True, "message": msg}

-    def delete_whole_dir_paths(self, dir_paths):
+    def delete_whole_dir_paths(self, dir_paths, delete=True):
+        size = 0
+
         for dir_path in dir_paths:
             # Delete all files and folders in dir path
             for root, dirs, files in os.walk(dir_path, topdown=False):
                 for name in files:
-                    os.remove(os.path.join(root, name))
+                    file_path = os.path.join(root, name)
+                    size += os.path.getsize(file_path)
+                    if delete:
+                        os.remove(file_path)
+                        self.log.debug("Removed file: {}".format(file_path))

                 for name in dirs:
-                    os.rmdir(os.path.join(root, name))
+                    if delete:
+                        os.rmdir(os.path.join(root, name))

+            if not delete:
+                continue
+
             # Delete even the folder and its parent folders if they are empty
             while True:
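The new `delete` flag turns both helpers into dry-run size calculators: the walk is identical, only the `os.remove`/`os.rmdir` calls are skipped. A standalone sketch of that pattern:

```python
import os

def dir_size_or_delete(dir_path, delete=False):
    # Walk bottom-up so directories are already empty when we try to
    # remove them; with delete=False this is a pure size calculation.
    size = 0
    for root, dirs, files in os.walk(dir_path, topdown=False):
        for name in files:
            file_path = os.path.join(root, name)
            size += os.path.getsize(file_path)
            if delete:
                os.remove(file_path)
        for name in dirs:
            if delete:
                os.rmdir(os.path.join(root, name))
    return size

# Dry run first; actually delete once the reported size looks right.
# print(dir_size_or_delete("/tmp/some_publish_folder"))
```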
@@ -406,7 +463,11 @@ class DeleteOldVersions(BaseAction):

                 os.rmdir(os.path.join(dir_path))

-    def delete_only_repre_files(self, dir_paths, file_paths):
+        return size
+
+    def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
+        size = 0
+
         for dir_id, dir_path in dir_paths.items():
             dir_files = os.listdir(dir_path)
             collections, remainders = clique.assemble(dir_files)
@@ -420,8 +481,13 @@ class DeleteOldVersions(BaseAction):
                         "File was not found: {}".format(file_path)
                     )
                     continue
-                os.remove(file_path)
-                self.log.debug("Removed file: {}".format(file_path))
+
+                size += os.path.getsize(file_path)
+
+                if delete:
+                    os.remove(file_path)
+                    self.log.debug("Removed file: {}".format(file_path))

                 remainders.remove(file_path_base)
                 continue
@@ -440,21 +506,34 @@ class DeleteOldVersions(BaseAction):
                     final_col.head = os.path.join(dir_path, final_col.head)
                     for _file_path in final_col:
                         if os.path.exists(_file_path):
-                            os.remove(_file_path)
+                            size += os.path.getsize(_file_path)
+
+                            if delete:
+                                os.remove(_file_path)
+                                self.log.debug(
+                                    "Removed file: {}".format(_file_path)
+                                )

                     _seq_path = final_col.format("{head}{padding}{tail}")
                     self.log.debug("Removed files: {}".format(_seq_path))
                     collections.remove(final_col)

                 elif os.path.exists(file_path):
-                    os.remove(file_path)
-                    self.log.debug("Removed file: {}".format(file_path))
+                    size += os.path.getsize(file_path)
+
+                    if delete:
+                        os.remove(file_path)
+                        self.log.debug("Removed file: {}".format(file_path))
                 else:
                     self.log.warning(
                         "File was not found: {}".format(file_path)
                     )

         # Delete as much as possible parent folders
+        if not delete:
+            return size
+
         for dir_path in dir_paths.values():
             while True:
                 if not os.path.exists(dir_path):
@@ -467,6 +546,8 @@ class DeleteOldVersions(BaseAction):
                 self.log.debug("Removed folder: {}".format(dir_path))
                 os.rmdir(dir_path)

+        return size
+
     def path_from_represenation(self, representation, anatomy):
         try:
             template = representation["data"]["template"]
@@ -3,7 +3,7 @@ import json

 from pype.modules.ftrack.lib import BaseAction, statics_icon
 from pype.api import config, Anatomy, project_overrides_dir_path
-from pype.modules.ftrack.lib.avalon_sync import get_avalon_attr
+from pype.modules.ftrack.lib.avalon_sync import get_pype_attr


 class PrepareProject(BaseAction):
@@ -221,7 +221,7 @@ class PrepareProject(BaseAction):
     def _attributes_to_set(self, project_defaults):
         attributes_to_set = {}

-        cust_attrs, hier_cust_attrs = get_avalon_attr(self.session, True)
+        cust_attrs, hier_cust_attrs = get_pype_attr(self.session, True)

         for attr in hier_cust_attrs:
             key = attr["key"]
@@ -8,7 +8,7 @@ from pype.modules.ftrack.lib import BaseAction, statics_icon
 from pype.api import Anatomy
 from pype.modules.ftrack.lib.io_nonsingleton import DbConnector

-from pype.modules.ftrack.lib.avalon_sync import CustAttrIdKey
+from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY


 class StoreThumbnailsToAvalon(BaseAction):
@@ -390,7 +390,7 @@ class StoreThumbnailsToAvalon(BaseAction):
             return output

         asset_ent = None
-        asset_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
+        asset_mongo_id = parent["custom_attributes"].get(CUST_ATTR_ID_KEY)
         if asset_mongo_id:
             try:
                 asset_mongo_id = ObjectId(asset_mongo_id)
@@ -1,5 +1,5 @@
 from pype.modules.ftrack.lib import BaseEvent
-from pype.modules.ftrack.lib.avalon_sync import CustAttrIdKey
+from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
 from pype.modules.ftrack.events.event_sync_to_avalon import SyncToAvalonEvent

@@ -29,7 +29,7 @@ class DelAvalonIdFromNew(BaseEvent):

             elif (
                 entity.get('action', None) == 'update' and
-                CustAttrIdKey in entity['keys'] and
+                CUST_ATTR_ID_KEY in entity['keys'] and
                 entity_id in created
             ):
                 ftrack_entity = session.get(
@@ -37,12 +37,9 @@ class DelAvalonIdFromNew(BaseEvent):
                     entity_id
                 )

-                cust_attr = ftrack_entity['custom_attributes'][
-                    CustAttrIdKey
-                ]
-
-                if cust_attr != '':
-                    ftrack_entity['custom_attributes'][CustAttrIdKey] = ''
+                cust_attrs = ftrack_entity["custom_attributes"]
+                if cust_attrs[CUST_ATTR_ID_KEY]:
+                    cust_attrs[CUST_ATTR_ID_KEY] = ""
                     session.commit()

         except Exception:
@@ -14,7 +14,7 @@ from avalon import schema

 from pype.modules.ftrack.lib import avalon_sync
 from pype.modules.ftrack.lib.avalon_sync import (
-    CustAttrIdKey, CustAttrAutoSync, EntitySchemas
+    CUST_ATTR_ID_KEY, CUST_ATTR_AUTO_SYNC, EntitySchemas
 )
 import ftrack_api
 from pype.modules.ftrack import BaseEvent
@@ -103,7 +103,7 @@ class SyncToAvalonEvent(BaseEvent):
     @property
     def avalon_cust_attrs(self):
         if self._avalon_cust_attrs is None:
-            self._avalon_cust_attrs = avalon_sync.get_avalon_attr(
+            self._avalon_cust_attrs = avalon_sync.get_pype_attr(
                 self.process_session
             )
         return self._avalon_cust_attrs
@@ -220,7 +220,7 @@ class SyncToAvalonEvent(BaseEvent):
     def avalon_custom_attributes(self):
         """Return info about changeability of entity and its parents."""
         if self._avalon_custom_attributes is None:
-            self._avalon_custom_attributes = avalon_sync.get_avalon_attr(
+            self._avalon_custom_attributes = avalon_sync.get_pype_attr(
                 self.process_session
             )
         return self._avalon_custom_attributes
@@ -557,10 +557,10 @@ class SyncToAvalonEvent(BaseEvent):
                 continue

             changes = ent_info["changes"]
-            if CustAttrAutoSync not in changes:
+            if CUST_ATTR_AUTO_SYNC not in changes:
                 continue

-            auto_sync = changes[CustAttrAutoSync]["new"]
+            auto_sync = changes[CUST_ATTR_AUTO_SYNC]["new"]
             if auto_sync == "1":
                 # Trigger sync to avalon action if auto sync was turned on
                 ft_project = self.cur_project
@@ -593,16 +593,16 @@ class SyncToAvalonEvent(BaseEvent):

         ft_project = self.cur_project
         # Check if auto-sync custom attribute exists
-        if CustAttrAutoSync not in ft_project["custom_attributes"]:
+        if CUST_ATTR_AUTO_SYNC not in ft_project["custom_attributes"]:
             # TODO should we send a message to someone?
             self.log.error((
                 "Custom attribute \"{}\" is not created or user \"{}\" used"
                 " for Event server doesn't have permissions to access it!"
-            ).format(CustAttrAutoSync, self.session.api_user))
+            ).format(CUST_ATTR_AUTO_SYNC, self.session.api_user))
             return True

         # Skip if auto-sync is not set
-        auto_sync = ft_project["custom_attributes"][CustAttrAutoSync]
+        auto_sync = ft_project["custom_attributes"][CUST_ATTR_AUTO_SYNC]
         if auto_sync is not True:
             return True

@@ -844,7 +844,7 @@ class SyncToAvalonEvent(BaseEvent):

             new_entity["custom_attributes"][key] = val

-        new_entity["custom_attributes"][CustAttrIdKey] = (
+        new_entity["custom_attributes"][CUST_ATTR_ID_KEY] = (
             str(avalon_entity["_id"])
         )
         ent_path = self.get_ent_path(new_entity_id)
@@ -1097,7 +1097,7 @@ class SyncToAvalonEvent(BaseEvent):
                 continue
             final_entity["data"][key] = val

-        _mongo_id_str = cust_attrs.get(CustAttrIdKey)
+        _mongo_id_str = cust_attrs.get(CUST_ATTR_ID_KEY)
         if _mongo_id_str:
             try:
                 _mongo_id = ObjectId(_mongo_id_str)
@@ -1158,15 +1158,17 @@ class SyncToAvalonEvent(BaseEvent):
             self.log.debug("Entity was synchronized <{}>".format(ent_path))

         mongo_id_str = str(mongo_id)
-        if mongo_id_str != ftrack_ent["custom_attributes"][CustAttrIdKey]:
-            ftrack_ent["custom_attributes"][CustAttrIdKey] = mongo_id_str
+        if mongo_id_str != ftrack_ent["custom_attributes"][CUST_ATTR_ID_KEY]:
+            ftrack_ent["custom_attributes"][CUST_ATTR_ID_KEY] = mongo_id_str
             try:
                 self.process_session.commit()
             except Exception:
                 self.process_session.rollback()
                 # TODO logging
                 # TODO report
-                error_msg = "Failed to store MongoID to entity's custom attribute"
+                error_msg = (
+                    "Failed to store MongoID to entity's custom attribute"
+                )
                 report_msg = (
                     "{}||SyncToAvalon action may solve this issue"
                 ).format(error_msg)
@@ -1245,7 +1247,7 @@ class SyncToAvalonEvent(BaseEvent):
             self.process_session, entity, hier_keys, defaults
         )
         for key, val in hier_values.items():
-            if key == CustAttrIdKey:
+            if key == CUST_ATTR_ID_KEY:
                 continue
             output[key] = val

@@ -1687,7 +1689,7 @@ class SyncToAvalonEvent(BaseEvent):
         if "_hierarchical" not in temp_dict:
             hier_mongo_id_configuration_id = None
             for attr in hier_attrs:
-                if attr["key"] == CustAttrIdKey:
+                if attr["key"] == CUST_ATTR_ID_KEY:
                     hier_mongo_id_configuration_id = attr["id"]
                     break
             temp_dict["_hierarchical"] = hier_mongo_id_configuration_id
@@ -1704,7 +1706,7 @@ class SyncToAvalonEvent(BaseEvent):

         for attr in cust_attrs:
             key = attr["key"]
-            if key != CustAttrIdKey:
+            if key != CUST_ATTR_ID_KEY:
                 continue

             if attr["entity_type"] != ent_info["entityType"]:
@@ -3,7 +3,7 @@ import re
 import subprocess

 from pype.modules.ftrack import BaseEvent
-from pype.modules.ftrack.lib.avalon_sync import CustAttrIdKey
+from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
 from pype.modules.ftrack.lib.io_nonsingleton import DbConnector

 from bson.objectid import ObjectId
@@ -106,7 +106,7 @@ class UserAssigmentEvent(BaseEvent):
         self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']

         avalon_entity = None
-        parent_id = parent['custom_attributes'].get(CustAttrIdKey)
+        parent_id = parent['custom_attributes'].get(CUST_ATTR_ID_KEY)
         if parent_id:
             parent_id = ObjectId(parent_id)
             avalon_entity = self.db_con.find_one({
@@ -5,7 +5,7 @@ from .ftrack_event_handler import BaseEvent
 from .ftrack_action_handler import BaseAction, statics_icon
 from .ftrack_app_handler import AppAction

-__all__ = [
+__all__ = (
     "avalon_sync",
     "credentials",
     "BaseHandler",

@@ -13,4 +13,4 @@ __all__ = [
     "BaseAction",
     "statics_icon",
     "AppAction"
-]
+)
@@ -1,6 +1,7 @@
 import os
 import re
 import queue
+import json
 import collections
 import copy

@@ -27,9 +28,21 @@ EntitySchemas = {
     "config": "avalon-core:config-1.0"
 }

+# Group name of custom attributes
+CUST_ATTR_GROUP = "pype"
+
 # name of Custom attribute that stores mongo_id from avalon db
-CustAttrIdKey = "avalon_mongo_id"
-CustAttrAutoSync = "avalon_auto_sync"
+CUST_ATTR_ID_KEY = "avalon_mongo_id"
+CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
+
+
+def default_custom_attributes_definition():
+    json_file_path = os.path.join(
+        os.path.dirname(__file__), "custom_attributes.json"
+    )
+    with open(json_file_path, "r") as json_stream:
+        data = json.load(json_stream)
+    return data
+
+
 def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
@@ -51,10 +64,11 @@ def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
     if not schema_obj:
         name_pattern = default_pattern
     else:
-        name_pattern = schema_obj.get(
-            "properties", {}).get(
-                "name", {}).get(
-                    "pattern", default_pattern
+        name_pattern = (
+            schema_obj
+            .get("properties", {})
+            .get("name", {})
+            .get("pattern", default_pattern)
         )
     if schema_patterns is not None:
         schema_patterns[schema_name] = name_pattern
@@ -64,9 +78,10 @@ def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
     return False


-def get_avalon_attr(session, split_hierarchical=True):
+def get_pype_attr(session, split_hierarchical=True):
     custom_attributes = []
     hier_custom_attributes = []
+    # TODO remove deprecated "avalon" group from query
     cust_attrs_query = (
         "select id, entity_type, object_type_id, is_hierarchical, default"
         " from CustomAttributeConfiguration"
@@ -322,12 +337,12 @@ class SyncEntitiesFactory:
             "*** Synchronization initialization started <{}>."
         ).format(project_full_name))
         # Check if `avalon_mongo_id` custom attribute exists or is accessible
-        if CustAttrIdKey not in ft_project["custom_attributes"]:
+        if CUST_ATTR_ID_KEY not in ft_project["custom_attributes"]:
             items = []
             items.append({
                 "type": "label",
                 "value": "# Can't access Custom attribute <{}>".format(
-                    CustAttrIdKey
+                    CUST_ATTR_ID_KEY
                 )
             })
             items.append({
@@ -687,7 +702,7 @@ class SyncEntitiesFactory:
     def set_cutom_attributes(self):
        self.log.debug("* Preparing custom attributes")
         # Get custom attributes and values
-        custom_attrs, hier_attrs = get_avalon_attr(self.session)
+        custom_attrs, hier_attrs = get_pype_attr(self.session)
         ent_types = self.session.query("select id, name from ObjectType").all()
         ent_types_by_name = {
             ent_type["name"]: ent_type["id"] for ent_type in ent_types
@@ -904,7 +919,7 @@ class SyncEntitiesFactory:
                 project_values[key] = value

         for key in avalon_hier:
-            if key == CustAttrIdKey:
+            if key == CUST_ATTR_ID_KEY:
                 continue
             value = self.entities_dict[top_id]["avalon_attrs"][key]
             if value is not None:
@@ -1058,7 +1073,7 @@ class SyncEntitiesFactory:
         same_mongo_id = []
         all_mongo_ids = {}
         for ftrack_id, entity_dict in self.entities_dict.items():
-            mongo_id = entity_dict["avalon_attrs"].get(CustAttrIdKey)
+            mongo_id = entity_dict["avalon_attrs"].get(CUST_ATTR_ID_KEY)
             if not mongo_id:
                 continue
             if mongo_id in all_mongo_ids:
@ -1089,7 +1104,7 @@ class SyncEntitiesFactory:
|
|||
entity_dict = self.entities_dict[ftrack_id]
|
||||
ent_path = self.get_ent_path(ftrack_id)
|
||||
|
||||
mongo_id = entity_dict["avalon_attrs"].get(CustAttrIdKey)
|
||||
mongo_id = entity_dict["avalon_attrs"].get(CUST_ATTR_ID_KEY)
|
||||
av_ent_by_mongo_id = self.avalon_ents_by_id.get(mongo_id)
|
||||
if av_ent_by_mongo_id:
|
||||
av_ent_ftrack_id = av_ent_by_mongo_id.get("data", {}).get(
|
||||
|
|
@ -1110,7 +1125,9 @@ class SyncEntitiesFactory:
|
|||
continue
|
||||
|
||||
_entity_dict = self.entities_dict[_ftrack_id]
|
||||
_mongo_id = _entity_dict["avalon_attrs"][CustAttrIdKey]
|
||||
_mongo_id = (
|
||||
_entity_dict["avalon_attrs"][CUST_ATTR_ID_KEY]
|
||||
)
|
||||
_av_ent_by_mongo_id = self.avalon_ents_by_id.get(
|
||||
_mongo_id
|
||||
)
|
||||
|
|
@ -1503,11 +1520,11 @@ class SyncEntitiesFactory:
|
|||
|
||||
avalon_attrs = self.entities_dict[ftrack_id]["avalon_attrs"]
|
||||
if (
|
||||
CustAttrIdKey not in avalon_attrs or
|
||||
avalon_attrs[CustAttrIdKey] != avalon_id
|
||||
CUST_ATTR_ID_KEY not in avalon_attrs or
|
||||
avalon_attrs[CUST_ATTR_ID_KEY] != avalon_id
|
||||
):
|
||||
configuration_id = self.entities_dict[ftrack_id][
|
||||
"avalon_attrs_id"][CustAttrIdKey]
|
||||
"avalon_attrs_id"][CUST_ATTR_ID_KEY]
|
||||
|
||||
_entity_key = collections.OrderedDict({
|
||||
"configuration_id": configuration_id,
|
||||
|
|
@ -1587,7 +1604,7 @@ class SyncEntitiesFactory:
|
|||
|
||||
# avalon_archived_by_id avalon_archived_by_name
|
||||
current_id = (
|
||||
entity_dict["avalon_attrs"].get(CustAttrIdKey) or ""
|
||||
entity_dict["avalon_attrs"].get(CUST_ATTR_ID_KEY) or ""
|
||||
).strip()
|
||||
mongo_id = current_id
|
||||
name = entity_dict["name"]
|
||||
|
|
@ -1623,14 +1640,14 @@ class SyncEntitiesFactory:
|
|||
if current_id != new_id_str:
|
||||
# store mongo id to ftrack entity
|
||||
configuration_id = self.hier_cust_attr_ids_by_key.get(
|
||||
CustAttrIdKey
|
||||
CUST_ATTR_ID_KEY
|
||||
)
|
||||
if not configuration_id:
|
||||
# NOTE this is for cases when CustAttrIdKey key is not
|
||||
# NOTE this is for cases when CUST_ATTR_ID_KEY key is not
|
||||
# hierarchical custom attribute but per entity type
|
||||
configuration_id = self.entities_dict[ftrack_id][
|
||||
"avalon_attrs_id"
|
||||
][CustAttrIdKey]
|
||||
][CUST_ATTR_ID_KEY]
|
||||
|
||||
_entity_key = collections.OrderedDict({
|
||||
"configuration_id": configuration_id,
|
||||
|
|
@ -1739,7 +1756,7 @@ class SyncEntitiesFactory:
|
|||
project_item = self.entities_dict[self.ft_project_id]["final_entity"]
|
||||
mongo_id = (
|
||||
self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
|
||||
CustAttrIdKey
|
||||
CUST_ATTR_ID_KEY
|
||||
) or ""
|
||||
).strip()
|
||||
|
||||
|
|
@ -1770,7 +1787,7 @@ class SyncEntitiesFactory:
|
|||
|
||||
# store mongo id to ftrack entity
|
||||
entity = self.entities_dict[self.ft_project_id]["entity"]
|
||||
entity["custom_attributes"][CustAttrIdKey] = str(new_id)
|
||||
entity["custom_attributes"][CUST_ATTR_ID_KEY] = str(new_id)
|
||||
|
||||
def _bubble_changeability(self, unchangeable_ids):
|
||||
unchangeable_queue = queue.Queue()
|
||||
|
|
@ -2151,7 +2168,7 @@ class SyncEntitiesFactory:
|
|||
if new_entity_id not in p_chilren:
|
||||
self.entities_dict[parent_id]["children"].append(new_entity_id)
|
||||
|
||||
cust_attr, hier_attrs = get_avalon_attr(self.session)
|
||||
cust_attr, hier_attrs = get_pype_attr(self.session)
|
||||
for _attr in cust_attr:
|
||||
key = _attr["key"]
|
||||
if key not in av_entity["data"]:
|
||||
|
|
@ -2167,7 +2184,7 @@ class SyncEntitiesFactory:
|
|||
new_entity["custom_attributes"][key] = value
|
||||
|
||||
av_entity_id = str(av_entity["_id"])
|
||||
new_entity["custom_attributes"][CustAttrIdKey] = av_entity_id
|
||||
new_entity["custom_attributes"][CUST_ATTR_ID_KEY] = av_entity_id
|
||||
|
||||
self.ftrack_avalon_mapper[new_entity_id] = av_entity_id
|
||||
self.avalon_ftrack_mapper[av_entity_id] = new_entity_id
|
||||
|
|
|
|||
60
pype/modules/ftrack/lib/custom_attributes.json
Normal file
@@ -0,0 +1,60 @@
{
    "show": {
        "avalon_auto_sync": {
            "label": "Avalon auto-sync",
            "type": "boolean",
            "write_security_role": ["API", "Administrator"],
            "read_security_role": ["API", "Administrator"]
        },
        "library_project": {
            "label": "Library Project",
            "type": "boolean",
            "write_security_role": ["API", "Administrator"],
            "read_security_role": ["API", "Administrator"]
        }
    },
    "is_hierarchical": {
        "fps": {
            "label": "FPS",
            "type": "number",
            "config": {"isdecimal": true}
        },
        "clipIn": {
            "label": "Clip in",
            "type": "number"
        },
        "clipOut": {
            "label": "Clip out",
            "type": "number"
        },
        "frameStart": {
            "label": "Frame start",
            "type": "number"
        },
        "frameEnd": {
            "label": "Frame end",
            "type": "number"
        },
        "resolutionWidth": {
            "label": "Resolution Width",
            "type": "number"
        },
        "resolutionHeight": {
            "label": "Resolution Height",
            "type": "number"
        },
        "pixelAspect": {
            "label": "Pixel aspect",
            "type": "number",
            "config": {"isdecimal": true}
        },
        "handleStart": {
            "label": "Frame handles start",
            "type": "number"
        },
        "handleEnd": {
            "label": "Frame handles end",
            "type": "number"
        }
    }
}

5
pype/modules/websocket_server/__init__.py
Normal file
@@ -0,0 +1,5 @@
from .websocket_server import WebSocketServer


def tray_init(tray_widget, main_widget):
    return WebSocketServer()

0
pype/modules/websocket_server/hosts/__init__.py
Normal file
47
pype/modules/websocket_server/hosts/external_app_1.py
Normal file
@@ -0,0 +1,47 @@
import asyncio

from pype.api import Logger
from wsrpc_aiohttp import WebSocketRoute

log = Logger().get_logger("WebsocketServer")


class ExternalApp1(WebSocketRoute):
    """
    One route, mimicking an external application (like Harmony, etc.).
    All functions could be called from the client.
    'do_notify' calls a function on the client - mimicking a
    notification after a long running job on the server or similar.
    """

    def init(self, **kwargs):
        # Python __init__ must return "self".
        # This method might return anything.
        log.debug("someone called ExternalApp1 route")
        return kwargs

    async def server_function_one(self):
        log.info('In function one')

    async def server_function_two(self):
        log.info('In function two')
        return 'function two'

    async def server_function_three(self):
        log.info('In function three')
        asyncio.ensure_future(self.do_notify())
        return '{"message":"function three"}'

    async def server_function_four(self, *args, **kwargs):
        log.info('In function four args {} kwargs {}'.format(args, kwargs))
        ret = dict(**kwargs)
        ret["message"] = "function four received arguments"
        return str(ret)

    # This method calls a function on the client side
    async def do_notify(self):
        import time
        time.sleep(5)
        log.info('Calling function on client after delay')
        awesome = 'Somebody called server_function_three method!'
        await self.socket.call('notify', result=awesome)

179
pype/modules/websocket_server/test_client/wsrpc_client.html
Normal file
@@ -0,0 +1,179 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Title</title>

    <!-- CSS only -->
    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/css/bootstrap.min.css" integrity="sha384-9aIt2nRpC12Uk9gS9baDl411NQApFmC26EwAOH8WgZl5MYYxFfc+NcPb1dKGj7Sk" crossorigin="anonymous">

    <script src="https://code.jquery.com/jquery-3.5.1.slim.min.js" integrity="sha384-DfXdz2htPH0lsSSs5nCTpuj/zy4C+OGpamoFVy38MVBnE+IbbVYUew+OrCXaRkfj" crossorigin="anonymous"></script>
    <script src="https://cdn.jsdelivr.net/npm/popper.js@1.16.0/dist/umd/popper.min.js" integrity="sha384-Q6E9RHvbIyZFJoft+2mJbHaEWldlvI9IOYy5n3zV9zzTtmI3UksdQRVvoxMfooAo" crossorigin="anonymous"></script>
    <script src="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/js/bootstrap.min.js" integrity="sha384-OgVRvuATP1z7JjHLkuOU7Xw704+h835Lr+6QL9UvYjZE3Ipu6Tp75j7Bh/kR0JKI" crossorigin="anonymous"></script>

    <script type="text/javascript" src="//unpkg.com/@wsrpc/client"></script>
    <script>
        WSRPC.DEBUG = true;
        WSRPC.TRACE = true;
        var url = (window.location.protocol === "https:" ? "wss://" : "ws://") + window.location.host + '/ws/';
        url = 'ws://localhost:8099/ws/';
        RPC = new WSRPC(url, 5000);

        console.log(RPC.state());
        // Configure client API that can be called from the server
        RPC.addRoute('notify', function (data) {
            console.log('Server called client route "notify":', data);
            alert('Server called client route "notify": ' + data.result);
            return data.result;
        });
        RPC.connect();
        console.log(RPC.state());

        $(document).ready(function() {
            function NoReturn(){
                // Call stateful route on the server.
                RPC.call('ExternalApp1.server_function_one').then(function (data) {
                    console.log('Result for calling server route "server_function_one": ', data);
                    alert('Function "server_function_one" returned: ' + data);
                }, function (error) {
                    alert(error);
                });
            }

            function ReturnValue(){
                // Call stateful route on the server.
                RPC.call('ExternalApp1.server_function_two').then(function (data) {
                    console.log('Result for calling server route "server_function_two": ', data);
                    alert('Function "server_function_two" returned: ' + data);
                }, function (error) {
                    alert(error);
                });
            }

            function ValueAndNotify(){
                // After you call this route, the server will execute the
                // 'notify' route on the client, registered above.
                RPC.call('ExternalApp1.server_function_three').then(function (data) {
                    console.log('Result for calling server route "server_function_three": ', data);
                    alert('Function "server_function_three" returned: ' + data);
                }, function (error) {
                    alert(error);
                });
            }

            function SendValue(){
                // Call stateful route on the server with arguments.
                RPC.call('ExternalApp1.server_function_four', {foo: 'one', bar: 'two'}).then(function (data) {
                    console.log('Result for calling server route "server_function_four": ', data);
                    alert('Function "server_function_four" returned: ' + data);
                }, function (error) {
                    alert(error);
                });
            }

            $('#noReturn').click(function() {
                NoReturn();
            })

            $('#returnValue').click(function() {
                ReturnValue();
            })

            $('#valueAndNotify').click(function() {
                ValueAndNotify();
            })

            $('#sendValue').click(function() {
                SendValue();
            })

        })

        <!-- // Call stateless method-->
        <!-- RPC.call('test2').then(function (data) {-->
        <!--     console.log('Result for calling server route "test2"', data);-->
        <!-- });-->
    </script>
</head>
<body>

<div class="d-flex flex-column flex-md-row align-items-center p-3 px-md-4 mb-3 bg-white border-bottom shadow-sm">
    <h5 class="my-0 mr-md-auto font-weight-normal">Test of wsrpc javascript client</h5>
</div>

<div class="container">
    <div class="card-deck mb-3 text-center">
        <div class="card mb-4 shadow-sm">
            <div class="card-header">
                <h4 class="my-0 font-weight-normal">No return value</h4>
            </div>
            <div class="card-body">
                <ul class="list-unstyled mt-3 mb-4">
                    <li>Calls server_function_one</li>
                    <li>Function only logs on server</li>
                    <li>No return value</li>
                    <li> </li>
                    <li> </li>
                    <li> </li>
                </ul>
                <button type="button" id="noReturn" class="btn btn-lg btn-block btn-outline-primary">Call server</button>
            </div>
        </div>
        <div class="card mb-4 shadow-sm">
            <div class="card-header">
                <h4 class="my-0 font-weight-normal">Return value</h4>
            </div>
            <div class="card-body">
                <ul class="list-unstyled mt-3 mb-4">
                    <li>Calls server_function_two</li>
                    <li>Function logs on server</li>
                    <li>Returns simple text value</li>
                    <li> </li>
                    <li> </li>
                    <li> </li>
                </ul>
                <button type="button" id="returnValue" class="btn btn-lg btn-block btn-outline-primary">Call server</button>
            </div>
        </div>
        <div class="card mb-4 shadow-sm">
            <div class="card-header">
                <h4 class="my-0 font-weight-normal">Notify</h4>
            </div>
            <div class="card-body">
                <ul class="list-unstyled mt-3 mb-4">
                    <li>Calls server_function_three</li>
                    <li>Function logs on server</li>
                    <li>Returns json payload</li>
                    <li>Server then calls function ON the client after delay</li>
                    <li> </li>
                </ul>
                <button type="button" id="valueAndNotify" class="btn btn-lg btn-block btn-outline-primary">Call server</button>
            </div>
        </div>
        <div class="card mb-4 shadow-sm">
            <div class="card-header">
                <h4 class="my-0 font-weight-normal">Send value</h4>
            </div>
            <div class="card-body">
                <ul class="list-unstyled mt-3 mb-4">
                    <li>Calls server_function_four</li>
                    <li>Function logs on server</li>
                    <li>Returns modified sent values</li>
                    <li> </li>
                    <li> </li>
                    <li> </li>
                </ul>
                <button type="button" id="sendValue" class="btn btn-lg btn-block btn-outline-primary">Call server</button>
            </div>
        </div>
    </div>
</div>

</body>
</html>

34
pype/modules/websocket_server/test_client/wsrpc_client.py
Normal file
@@ -0,0 +1,34 @@
import asyncio

from wsrpc_aiohttp import WSRPCClient

"""
Simple testing Python client for wsrpc_aiohttp.
Calls multiple methods on the server sequentially.
"""

loop = asyncio.get_event_loop()


async def main():
    print("main")
    client = WSRPCClient("ws://127.0.0.1:8099/ws/",
                         loop=asyncio.get_event_loop())

    client.add_route('notify', notify)
    await client.connect()
    print("connected")
    print(await client.proxy.ExternalApp1.server_function_one())
    print(await client.proxy.ExternalApp1.server_function_two())
    print(await client.proxy.ExternalApp1.server_function_three())
    print(await client.proxy.ExternalApp1.server_function_four(foo="one"))
    await client.close()


def notify(socket, *args, **kwargs):
    print("called from server")


if __name__ == "__main__":
    # loop.run_until_complete(main())
    asyncio.run(main())

187
pype/modules/websocket_server/websocket_server.py
Normal file
@@ -0,0 +1,187 @@
from pype.api import config, Logger

import threading
from aiohttp import web
import asyncio
from wsrpc_aiohttp import STATIC_DIR, WebSocketAsync

import os
import sys
import pyclbr
import importlib

log = Logger().get_logger("WebsocketServer")


class WebSocketServer():
    """
    Basic POC implementation of an asynchronous websocket RPC server.
    Uses the class in external_app_1.py to mimic an implementation for a
    single external application.
    The 'test_client' folder contains two test client implementations.

    WIP
    """

    def __init__(self):
        self.qaction = None
        self.failed_icon = None
        self._is_running = False
        default_port = 8099

        try:
            self.presets = config.get_presets()["services"]["websocket_server"]
        except Exception:
            self.presets = {"default_port": default_port, "exclude_ports": []}
            log.debug((
                "No presets set for WebsocketServer."
                " Using defaults \"{}\""
            ).format(str(self.presets)))

        self.app = web.Application()

        self.app.router.add_route("*", "/ws/", WebSocketAsync)
        self.app.router.add_static("/js", STATIC_DIR)
        self.app.router.add_static("/", ".")

        # add route with multiple methods for single "external app"
        directories_with_routes = ['hosts']
        self.add_routes_for_directories(directories_with_routes)

        self.websocket_thread = WebsocketServerThread(self, default_port)

    def add_routes_for_directories(self, directories_with_routes):
        """Loop through selected directories to find all modules and,
        in them, all classes implementing 'WebSocketRoute' that could be
        used as a route.
        All methods in these classes are registered automatically.
        """
        for dir_name in directories_with_routes:
            dir_name = os.path.join(os.path.dirname(__file__), dir_name)
            for file_name in os.listdir(dir_name):
                if '.py' in file_name and '__' not in file_name:
                    self.add_routes_for_module(file_name, dir_name)

    def add_routes_for_module(self, file_name, dir_name):
        """Auto-register routes for all classes implementing
        'WebSocketRoute' in 'file_name' in 'dir_name'.
        """
        module_name = file_name.replace('.py', '')
        module_info = pyclbr.readmodule(module_name, [dir_name])

        for class_name, cls_object in module_info.items():
            sys.path.append(dir_name)
            if 'WebSocketRoute' in cls_object.super:
                log.debug('Adding route for {}'.format(class_name))
                module = importlib.import_module(module_name)
                cls = getattr(module, class_name)
                WebSocketAsync.add_route(class_name, cls)
            sys.path.pop()

    def tray_start(self):
        self.websocket_thread.start()

    def tray_exit(self):
        self.stop()

    def stop_websocket_server(self):
        self.stop()

    @property
    def is_running(self):
        return self.websocket_thread.is_running

    def stop(self):
        if not self.is_running:
            return
        try:
            log.debug("Stopping websocket server")
            self.websocket_thread.is_running = False
            self.websocket_thread.stop()
        except Exception:
            log.warning(
                "Error happened while stopping websocket server",
                exc_info=True
            )

    def thread_stopped(self):
        self._is_running = False


class WebsocketServerThread(threading.Thread):
    """Listener for websocket rpc requests.

    It would probably be better to "attach" this to the main thread (as,
    for example, Harmony needs to run some things on the main thread), but
    currently it creates a separate thread and a separate asyncio event loop.
    """
    def __init__(self, module, port):
        super(WebsocketServerThread, self).__init__()
        self.is_running = False
        self.port = port
        self.module = module
        self.loop = None
        self.runner = None
        self.site = None

    def run(self):
        self.is_running = True

        try:
            log.info("Starting websocket server")
            self.loop = asyncio.new_event_loop()  # create new loop for thread
            asyncio.set_event_loop(self.loop)

            self.loop.run_until_complete(self.start_server())

            log.debug(
                "Running Websocket server on URL:"
                " \"ws://localhost:{}\"".format(self.port)
            )

            asyncio.ensure_future(self.check_shutdown(), loop=self.loop)
            self.loop.run_forever()
        except Exception:
            log.warning(
                "Websocket Server service has failed", exc_info=True
            )
        finally:
            self.loop.close()  # optional

        self.is_running = False
        self.module.thread_stopped()
        log.info("Websocket server stopped")

    async def start_server(self):
        """Start runner and TCPSite."""
        self.runner = web.AppRunner(self.module.app)
        await self.runner.setup()
        self.site = web.TCPSite(self.runner, 'localhost', self.port)
        await self.site.start()

    def stop(self):
        """Set is_running flag to False; 'check_shutdown' shuts server down."""
        self.is_running = False

    async def check_shutdown(self):
        """Future that runs and periodically checks if the server should
        still be running.
        """
        while self.is_running:
            await asyncio.sleep(0.5)

        log.debug("Starting shutdown")
        await self.site.stop()
        log.debug("Site stopped")
        await self.runner.cleanup()
        log.debug("Runner stopped")
        tasks = [task for task in asyncio.all_tasks() if
                 task is not asyncio.current_task()]
        list(map(lambda task: task.cancel(), tasks))  # cancel all the tasks
        results = await asyncio.gather(*tasks, return_exceptions=True)
        log.debug(f'Finished awaiting cancelled tasks, results: {results}...')
        await self.loop.shutdown_asyncgens()
        # to really make sure everything else has time to stop
        await asyncio.sleep(0.07)
        self.loop.stop()

@@ -1,11 +1,18 @@
# -*- coding: utf-8 -*-
"""Cleanup leftover files from publish."""
import os
import shutil
import pyblish.api


def clean_renders(instance):
    transfers = instance.data.get("transfers", list())
    """Delete renders after publishing.

    Args:
        instance (pyblish.api.Instance): Instance to work on.

    """
    transfers = instance.data.get("transfers", list())
    current_families = instance.data.get("families", list())
    instance_family = instance.data.get("family", None)
    dirnames = []


@@ -40,6 +47,7 @@ class CleanUp(pyblish.api.InstancePlugin):
    active = True

    def process(self, instance):
        """Plugin entry point."""
        # Get the errored instances
        failed = []
        for result in instance.context.data["results"]:


@@ -52,7 +60,7 @@ class CleanUp(pyblish.api.InstancePlugin):
            )
        )

        self.log.info("Cleaning renders ...")
        self.log.info("Performing cleanup on {}".format(instance))
        clean_renders(instance)

        if [ef for ef in self.exclude_families


@@ -60,16 +68,17 @@ class CleanUp(pyblish.api.InstancePlugin):
            return
        import tempfile

        staging_dir = instance.data.get("stagingDir", None)
        if not staging_dir or not os.path.exists(staging_dir):
            self.log.info("No staging directory found: %s" % staging_dir)
            return

        temp_root = tempfile.gettempdir()
        staging_dir = instance.data.get("stagingDir", None)

        if not os.path.normpath(staging_dir).startswith(temp_root):
            self.log.info("Skipping cleanup. Staging directory is not in the "
                          "temp folder: %s" % staging_dir)
            return

        self.log.info("Removing staging directory ...")
        if not staging_dir or not os.path.exists(staging_dir):
            self.log.info("No staging directory found: %s" % staging_dir)
            return

        self.log.info("Removing staging directory {}".format(staging_dir))
        shutil.rmtree(staging_dir)

@@ -410,15 +410,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        # go through aovs in expected files
        for aov, files in exp_files[0].items():
            cols, rem = clique.assemble(files)
            # we shouldn't have any remainders
            if rem:
                self.log.warning(
                    "skipping unexpected files found "
                    "in sequence: {}".format(rem))

            # but we really expect only one collection, nothing else makes sense
            assert len(cols) == 1, "only one image sequence type is expected"
            # we shouldn't have any remainders. And if we do, it should
            # be just one item for single frame renders.
            if not cols and rem:
                assert len(rem) == 1, ("Found multiple non related files "
                                       "to render, don't know what to do "
                                       "with them.")
                col = rem[0]
                _, ext = os.path.splitext(col)
            else:
                # but we really expect only one collection.
                # Nothing else makes sense.
                assert len(cols) == 1, "only one image sequence type is expected"  # noqa: E501
                _, ext = os.path.splitext(cols[0].tail)
                col = list(cols[0])

            self.log.debug(col)
            # create subset name `familyTaskSubset_AOV`
            group_name = 'render{}{}{}{}'.format(
                task[0].upper(), task[1:],


@@ -426,7 +433,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

            subset_name = '{}_{}'.format(group_name, aov)

            staging = os.path.dirname(list(cols[0])[0])
            if isinstance(col, (list, tuple)):
                staging = os.path.dirname(col[0])
            else:
                staging = os.path.dirname(col)

            success, rootless_staging_dir = (
                self.anatomy.find_root_template_from_path(staging)
            )


@@ -451,13 +462,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
            new_instance["subset"] = subset_name
            new_instance["subsetGroup"] = group_name

            ext = cols[0].tail.lstrip(".")

            # create representation
            if isinstance(col, (list, tuple)):
                files = [os.path.basename(f) for f in col]
            else:
                files = os.path.basename(col)

            rep = {
                "name": ext,
                "ext": ext,
                "files": [os.path.basename(f) for f in list(cols[0])],
                "files": files,
                "frameStart": int(instance_data.get("frameStartHandle")),
                "frameEnd": int(instance_data.get("frameEndHandle")),
                # If expectedFiles are absolute, we need only filenames
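
The collection/remainder handling above is easier to see with a quick clique demonstration (illustrative file names; clique is the library this plugin already imports):

import clique

# A frame sequence assembles into one collection, no remainder.
cols, rem = clique.assemble(["beauty.0001.exr", "beauty.0002.exr"])
print(cols[0].format("{head}{padding}{tail}"))  # beauty.%04d.exr
print(rem)  # []

# A single frame often ends up in the remainder instead, which is the
# case the new `if not cols and rem` branch handles.
cols, rem = clique.assemble(["beauty.exr"])
print(cols, rem)  # [] ['beauty.exr']
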
133
pype/plugins/global/publish/validate_instance_in_context.py
Normal file
@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*-
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import

import pyblish.api
import pype.api


class SelectInvalidInstances(pyblish.api.Action):
    """Select invalid instances in Outliner."""

    label = "Select Instances"
    icon = "briefcase"
    on = "failed"

    def process(self, context, plugin):
        """Process invalid validators and select invalid instances."""
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if result["error"] is None:
                continue
            if result["instance"] is None:
                continue
            if result["instance"] in failed:
                continue
            if result["plugin"] != plugin:
                continue

            failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        if instances:
            self.log.info(
                "Selecting invalid nodes: %s" % ", ".join(
                    [str(x) for x in instances]
                )
            )
            self.select(instances)
        else:
            self.log.info("No invalid nodes found.")
            self.deselect()

    def select(self, instances):
        if "nuke" in pyblish.api.registered_hosts():
            import avalon.nuke.lib
            import nuke
            avalon.nuke.lib.select_nodes(
                [nuke.toNode(str(x)) for x in instances]
            )

        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.select(instances, replace=True, noExpand=True)

    def deselect(self):
        if "nuke" in pyblish.api.registered_hosts():
            import avalon.nuke.lib
            avalon.nuke.lib.reset_selection()

        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.select(deselect=True)


class RepairSelectInvalidInstances(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if result["error"] is None:
                continue
            if result["instance"] is None:
                continue
            if result["instance"] in failed:
                continue
            if result["plugin"] != plugin:
                continue

            failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        context_asset = context.data["assetEntity"]["name"]
        for instance in instances:
            self.set_attribute(instance, context_asset)

    def set_attribute(self, instance, context_asset):
        if "nuke" in pyblish.api.registered_hosts():
            import nuke
            nuke.toNode(
                instance.data.get("name")
            )["avalon:asset"].setValue(context_asset)

        if "maya" in pyblish.api.registered_hosts():
            from maya import cmds
            cmds.setAttr(
                instance.data.get("name") + ".asset",
                context_asset,
                type="string"
            )


class ValidateInstanceInContext(pyblish.api.InstancePlugin):
    """Validator to check if instance asset matches context asset.

    When working in per-shot style you always publish data in the context of
    the current asset (shot). This validator checks that this is so. It is
    optional, so it can be disabled when needed.

    The action on this validator will select invalid instances in Outliner.
    """

    order = pype.api.ValidateContentsOrder
    label = "Instance in same Context"
    optional = True
    hosts = ["maya", "nuke"]
    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]

    def process(self, instance):
        asset = instance.data.get("asset")
        context_asset = instance.context.data["assetEntity"]["name"]
        msg = "{} has asset {}".format(instance.name, asset)
        assert asset == context_asset, msg

@@ -1,14 +1,25 @@
from avalon import api
import maya.app.renderSetup.model.renderSetup as renderSetup
from avalon.maya import lib
from maya import cmds
# -*- coding: utf-8 -*-
"""Load and update RenderSetup settings.

Working with RenderSetup settings in Maya is done using json files.
When this json is loaded, it will overwrite all settings on the RenderSetup
instance.
"""

import json
import six
import sys

from avalon import api
from avalon.maya import lib
from pype.hosts.maya import lib as pypelib

from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup


class RenderSetupLoader(api.Loader):
    """
    This will load json preset for RenderSetup, overwriting current one.
    """
    """Load json preset for RenderSetup overwriting current one."""

    families = ["rendersetup"]
    representations = ["json"]


@@ -19,7 +30,7 @@ class RenderSetupLoader(api.Loader):
    color = "orange"

    def load(self, context, name, namespace, data):
        """Load RenderSetup settings."""
        from avalon.maya.pipeline import containerise
        # from pype.hosts.maya.lib import namespaced


@@ -29,7 +40,7 @@ class RenderSetupLoader(api.Loader):
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        self.log.info(">>> loading json [ {} ]".format(self.fname))
        with open(self.fname, "r") as file:
            renderSetup.instance().decode(
                json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)


@@ -42,9 +53,56 @@ class RenderSetupLoader(api.Loader):
        if not nodes:
            return

        self.log.info(">>> containerising [ {} ]".format(name))
        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def remove(self, container):
        """Remove RenderSetup settings instance."""
        from maya import cmds

        container_name = container["objectName"]

        self.log.info("Removing '%s' from Maya.." % container["name"])

        container_content = cmds.sets(container_name, query=True)
        nodes = cmds.ls(container_content, long=True)

        nodes.append(container_name)

        try:
            cmds.delete(nodes)
        except ValueError:
            # Already implicitly deleted by Maya upon removing reference
            pass

    def update(self, container, representation):
        """Update RenderSetup settings by overwriting existing settings."""
        pypelib.show_message(
            "Render setup update",
            "Render setup settings will be overwritten by the new version. "
            "All settings specified by the user that are not included in "
            "the loaded version will be lost.")
        path = api.get_representation_path(representation)
        with open(path, "r") as file:
            try:
                renderSetup.instance().decode(
                    json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)
            except Exception:
                self.log.error("There were errors during loading")
                six.reraise(*sys.exc_info())

        # Update metadata
        node = container["objectName"]
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")
        self.log.info("... updated")

    def switch(self, container, representation):
        """Switch representations."""
        self.update(container, representation)

@@ -19,6 +19,7 @@ class ExtractCameraAlembic(pype.api.Extractor):
    label = "Camera (Alembic)"
    hosts = ["maya"]
    families = ["camera"]
    bake_attributes = []

    def process(self, instance):


@@ -66,6 +67,14 @@ class ExtractCameraAlembic(pype.api.Extractor):

        job_str += ' -file "{0}"'.format(path)

        # bake specified attributes in preset
        assert isinstance(self.bake_attributes, (list, tuple)), (
            "Attributes to bake must be specified as a list"
        )
        for attr in self.bake_attributes:
            self.log.info("Adding {} attribute".format(attr))
            job_str += " -attr {0}".format(attr)

        with lib.evaluation("off"):
            with avalon.maya.suspended_refresh():
                cmds.AbcExport(j=job_str, verbose=False)

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract camera as Maya Scene."""
import os

from maya import cmds


@@ -65,8 +67,8 @@ def unlock(plug):
            cmds.disconnectAttr(source, destination)


class ExtractCameraMayaAscii(pype.api.Extractor):
    """Extract a Camera as Maya Ascii.
class ExtractCameraMayaScene(pype.api.Extractor):
    """Extract a Camera as Maya Scene.

    This will create a duplicate of the camera that will be baked *with*
    substeps and handles for the required frames. This temporary duplicate


@@ -81,13 +83,28 @@ class ExtractCameraMayaScene(pype.api.Extractor):

    """

    label = "Camera (Maya Ascii)"
    label = "Camera (Maya Scene)"
    hosts = ["maya"]
    families = ["camera"]
    scene_type = "ma"

    def process(self, instance):
        """Plugin entry point."""
        # get settings
        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
        if ext_mapping:
            self.log.info("Looking in presets for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
                    self.scene_type = ext_mapping[family]
                    self.log.info(
                        "Using {} as scene type".format(self.scene_type))
                    break
                except KeyError:
                    # no preset found
                    pass

        framerange = [instance.data.get("frameStart", 1),
                      instance.data.get("frameEnd", 1)]
        handles = instance.data.get("handles", 0)


@@ -95,7 +112,7 @@ class ExtractCameraMayaScene(pype.api.Extractor):
        bake_to_worldspace = instance.data("bakeToWorldSpace", True)

        if not bake_to_worldspace:
            self.log.warning("Camera (Maya Ascii) export only supports world"
            self.log.warning("Camera (Maya Scene) export only supports world"
                             "space baked camera extractions. The disabled "
                             "bake to world space is ignored...")


@@ -115,7 +132,7 @@ class ExtractCameraMayaScene(pype.api.Extractor):

        # Define extract output file path
        dir_path = self.staging_dir(instance)
        filename = "{0}.ma".format(instance.name)
        filename = "{0}.{1}".format(instance.name, self.scene_type)
        path = os.path.join(dir_path, filename)

        # Perform extraction


@@ -152,7 +169,7 @@ class ExtractCameraMayaScene(pype.api.Extractor):
                cmds.select(baked_shapes, noExpand=True)
                cmds.file(path,
                          force=True,
                          typ="mayaAscii",
                          typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
                          exportSelected=True,
                          preserveReferences=False,
                          constructionHistory=False,


@@ -164,15 +181,15 @@ class ExtractCameraMayaScene(pype.api.Extractor):
        # Delete the baked hierarchy
        if bake_to_worldspace:
            cmds.delete(baked)

        massage_ma_file(path)
        if self.scene_type == "ma":
            massage_ma_file(path)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'ma',
            'ext': 'ma',
            'name': self.scene_type,
            'ext': self.scene_type,
            'files': filename,
            "stagingDir": dir_path,
        }
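
The `ext_mapping` preset used above (and in the other scene extractors changed in this commit) is just a family-to-extension dictionary; a minimal sketch of how the lookup resolves, with an assumed preset value (key names mirror the plugin code, the values are illustrative):

# Illustrative preset data; in production it comes from
# instance.context.data["presets"]["maya"].
presets = {"maya": {"ext_mapping": {"camera": "mb", "model": "ma"}}}

scene_type = "ma"  # plugin default
ext_mapping = presets["maya"].get("ext_mapping")
for family in ["camera"]:
    try:
        scene_type = ext_mapping[family]  # missing key raises KeyError
        break
    except KeyError:
        continue

print(scene_type)  # "mb" -> cmds.file(..., typ="mayaBinary")
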
@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract data as Maya scene (raw)."""
import os

from maya import cmds


@@ -6,24 +8,37 @@ import avalon.maya
import pype.api


class ExtractMayaAsciiRaw(pype.api.Extractor):
    """Extract as Maya Ascii (raw)
class ExtractMayaSceneRaw(pype.api.Extractor):
    """Extract as Maya Scene (raw).

    This will preserve all references, construction history, etc.

    """

    label = "Maya ASCII (Raw)"
    label = "Maya Scene (Raw)"
    hosts = ["maya"]
    families = ["mayaAscii",
                "setdress",
                "layout"]
    scene_type = "ma"

    def process(self, instance):
        """Plugin entry point."""
        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
        if ext_mapping:
            self.log.info("Looking in presets for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
                    self.scene_type = ext_mapping[family]
                    self.log.info(
                        "Using {} as scene type".format(self.scene_type))
                    break
                except KeyError:
                    # no preset found
                    pass
        # Define extract output file path
        dir_path = self.staging_dir(instance)
        filename = "{0}.ma".format(instance.name)
        filename = "{0}.{1}".format(instance.name, self.scene_type)
        path = os.path.join(dir_path, filename)

        # Whether to include all nodes in the instance (including those from


@@ -38,12 +53,12 @@ class ExtractMayaSceneRaw(pype.api.Extractor):
        members = instance[:]

        # Perform extraction
        self.log.info("Performing extraction..")
        self.log.info("Performing extraction ...")
        with avalon.maya.maintained_selection():
            cmds.select(members, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
                      exportSelected=True,
                      preserveReferences=True,
                      constructionHistory=True,


@@ -55,8 +70,8 @@ class ExtractMayaSceneRaw(pype.api.Extractor):
            instance.data["representations"] = []

        representation = {
            'name': 'ma',
            'ext': 'ma',
            'name': self.scene_type,
            'ext': self.scene_type,
            'files': filename,
            "stagingDir": dir_path
        }

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract model as Maya Scene."""
import os

from maya import cmds


@@ -8,7 +10,7 @@ from pype.hosts.maya import lib


class ExtractModel(pype.api.Extractor):
    """Extract as Model (Maya Ascii)
    """Extract as Model (Maya Scene).

    Only extracts contents based on the original "setMembers" data to ensure
    publishing the least amount of required shapes. From that it only takes


@@ -22,19 +24,33 @@ class ExtractModel(pype.api.Extractor):

    """

    label = "Model (Maya ASCII)"
    label = "Model (Maya Scene)"
    hosts = ["maya"]
    families = ["model"]
    scene_type = "ma"

    def process(self, instance):
        """Plugin entry point."""
        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
        if ext_mapping:
            self.log.info("Looking in presets for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
                    self.scene_type = ext_mapping[family]
                    self.log.info(
                        "Using {} as scene type".format(self.scene_type))
                    break
                except KeyError:
                    # no preset found
                    pass
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.ma".format(instance.name)
        filename = "{0}.{1}".format(instance.name, self.scene_type)
        path = os.path.join(stagingdir, filename)

        # Perform extraction
        self.log.info("Performing extraction..")
        self.log.info("Performing extraction ...")

        # Get only the shape contents we need in such a way that we avoid
        # taking along intermediateObjects


@@ -59,7 +75,7 @@ class ExtractModel(pype.api.Extractor):
            cmds.select(members, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
                      exportSelected=True,
                      preserveReferences=False,
                      channels=False,


@@ -73,8 +89,8 @@ class ExtractModel(pype.api.Extractor):
            instance.data["representations"] = []

        representation = {
            'name': 'ma',
            'ext': 'ma',
            'name': self.scene_type,
            'ext': self.scene_type,
            'files': filename,
            "stagingDir": stagingdir,
        }

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
"""Extract rig as Maya Scene."""
import os

from maya import cmds


@@ -7,26 +9,40 @@ import pype.api


class ExtractRig(pype.api.Extractor):
    """Extract rig as Maya Ascii"""
    """Extract rig as Maya Scene."""

    label = "Extract Rig (Maya ASCII)"
    label = "Extract Rig (Maya Scene)"
    hosts = ["maya"]
    families = ["rig"]
    scene_type = "ma"

    def process(self, instance):
        """Plugin entry point."""
        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
        if ext_mapping:
            self.log.info("Looking in presets for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
                    self.scene_type = ext_mapping[family]
                    self.log.info(
                        "Using {} as scene type".format(self.scene_type))
                    break
                except KeyError:
                    # no preset found
                    pass
        # Define extract output file path
        dir_path = self.staging_dir(instance)
        filename = "{0}.ma".format(instance.name)
        filename = "{0}.{1}".format(instance.name, self.scene_type)
        path = os.path.join(dir_path, filename)

        # Perform extraction
        self.log.info("Performing extraction..")
        self.log.info("Performing extraction ...")
        with avalon.maya.maintained_selection():
            cmds.select(instance, noExpand=True)
            cmds.file(path,
                      force=True,
                      typ="mayaAscii",
                      typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
                      exportSelected=True,
                      preserveReferences=False,
                      channels=True,


@@ -38,12 +54,11 @@ class ExtractRig(pype.api.Extractor):
            instance.data["representations"] = []

        representation = {
            'name': 'ma',
            'ext': 'ma',
            'name': self.scene_type,
            'ext': self.scene_type,
            'files': filename,
            "stagingDir": dir_path
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

@@ -1,3 +1,6 @@
# -*- coding: utf-8 -*-
"""Extract Yeti rig."""

import os
import json
import contextlib


@@ -11,7 +14,7 @@ import pype.hosts.maya.lib as maya

@contextlib.contextmanager
def disconnect_plugs(settings, members):

    """Disconnect and store attribute connections."""
    members = cmds.ls(members, long=True)
    original_connections = []
    try:


@@ -55,7 +58,7 @@ def disconnect_plugs(settings, members):

@contextlib.contextmanager
def yetigraph_attribute_values(assumed_destination, resources):

    """Get values from Yeti attributes in graph."""
    try:
        for resource in resources:
            if "graphnode" not in resource:


@@ -89,14 +92,28 @@ def yetigraph_attribute_values(assumed_destination, resources):


class ExtractYetiRig(pype.api.Extractor):
    """Extract the Yeti rig to a MayaAscii and write the Yeti rig data"""
    """Extract the Yeti rig to a Maya Scene and write the Yeti rig data."""

    label = "Extract Yeti Rig"
    hosts = ["maya"]
    families = ["yetiRig"]
    scene_type = "ma"

    def process(self, instance):
        """Plugin entry point."""
        ext_mapping = instance.context.data["presets"]["maya"].get("ext_mapping")  # noqa: E501
        if ext_mapping:
            self.log.info("Looking in presets for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
                    self.scene_type = ext_mapping[family]
                    self.log.info(
                        "Using {} as scene type".format(self.scene_type))
                    break
                except KeyError:
                    # no preset found
                    pass
        yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
        if not yeti_nodes:
            raise RuntimeError("No pgYetiMaya nodes found in the instance")


@@ -106,7 +123,8 @@ class ExtractYetiRig(pype.api.Extractor):
        settings_path = os.path.join(dirname, "yeti.rigsettings")

        # Yeti related staging dirs
        maya_path = os.path.join(dirname, "yeti_rig.ma")
        maya_path = os.path.join(
            dirname, "yeti_rig.{}".format(self.scene_type))

        self.log.info("Writing metadata file")


@@ -153,7 +171,7 @@ class ExtractYetiRig(pype.api.Extractor):
            cmds.file(maya_path,
                      force=True,
                      exportSelected=True,
                      typ="mayaAscii",
                      typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
                      preserveReferences=False,
                      constructionHistory=True,
                      shader=False)


@@ -163,21 +181,21 @@ class ExtractYetiRig(pype.api.Extractor):
        if "representations" not in instance.data:
            instance.data["representations"] = []

        self.log.info("rig file: {}".format("yeti_rig.ma"))
        self.log.info("rig file: {}".format(maya_path))
        instance.data["representations"].append(
            {
                'name': "ma",
                'ext': 'ma',
                'files': "yeti_rig.ma",
                'name': self.scene_type,
                'ext': self.scene_type,
                'files': os.path.basename(maya_path),
                'stagingDir': dirname
            }
        )
        self.log.info("settings file: {}".format("yeti.rigsettings"))
        self.log.info("settings file: {}".format(settings))
        instance.data["representations"].append(
            {
                'name': 'rigsettings',
                'ext': 'rigsettings',
                'files': 'yeti.rigsettings',
                'files': os.path.basename(settings),
                'stagingDir': dirname
            }
        )

@@ -20,6 +20,7 @@ import os
import json
import getpass
import copy
import re

import clique
import requests


@@ -108,8 +109,8 @@ def get_renderer_variables(renderlayer, root):
    # does not work for vray.
    scene = cmds.file(query=True, sceneName=True)
    scene, _ = os.path.splitext(os.path.basename(scene))
    filename_0 = filename_prefix.replace('<Scene>', scene)
    filename_0 = filename_0.replace('<Layer>', renderlayer)
    filename_0 = re.sub('<Scene>', scene, filename_prefix, flags=re.IGNORECASE)  # noqa: E501
    filename_0 = re.sub('<Layer>', renderlayer, filename_0, flags=re.IGNORECASE)  # noqa: E501
    filename_0 = "{}.{}.{}".format(
        filename_0, "#" * int(padding), extension)
    filename_0 = os.path.normpath(os.path.join(root, filename_0))
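
The switch from str.replace to re.sub makes the `<Scene>`/`<Layer>` token substitution case-insensitive, which matters because render filename prefixes may carry the tokens in any casing. A quick illustration with made-up prefix values:

import re

filename_prefix = "<scene>/<LAYER>/beauty"  # tokens in arbitrary casing
scene, renderlayer = "shot010_lighting_v001", "rs_main"

# str.replace would miss "<scene>" and "<LAYER>"; re.sub with IGNORECASE won't.
filename_0 = re.sub("<Scene>", scene, filename_prefix, flags=re.IGNORECASE)
filename_0 = re.sub("<Layer>", renderlayer, filename_0, flags=re.IGNORECASE)
print(filename_0)  # shot010_lighting_v001/rs_main/beauty
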
@@ -375,16 +376,32 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
        if isinstance(exp[0], dict):
            # we have aovs and we need to iterate over them
            for _aov, files in exp[0].items():
                col = clique.assemble(files)[0][0]
                output_file = col.format('{head}{padding}{tail}')
                payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501
                col, rem = clique.assemble(files)
                if not col and rem:
                    # we couldn't find any collections but have
                    # individual files.
                    assert len(rem) == 1, ("Found multiple non related files "
                                           "to render, don't know what to do "
                                           "with them.")
                    payload['JobInfo']['OutputFilename' + str(exp_index)] = rem[0]  # noqa: E501
                    output_file = rem[0]
                else:
                    output_file = col[0].format('{head}{padding}{tail}')
                    payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501
                output_filenames[exp_index] = output_file
                exp_index += 1
        else:
            col = clique.assemble(files)[0][0]
            output_file = col.format('{head}{padding}{tail}')
            payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file
            # OutputFilenames[exp_index] = output_file
            col, rem = clique.assemble(files)
            if not col and rem:
                # we couldn't find any collections but have
                # individual files.
                assert len(rem) == 1, ("Found multiple non related files "
                                       "to render, don't know what to do "
                                       "with them.")
                payload['JobInfo']['OutputFilename' + str(exp_index)] = rem[0]  # noqa: E501
            else:
                output_file = col[0].format('{head}{padding}{tail}')
                payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file  # noqa: E501

        plugin = payload["JobInfo"]["Plugin"]
        self.log.info("using render plugin : {}".format(plugin))

@@ -62,9 +62,16 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
        for family in families:
            for preset in presets[family]:
                [node_name, attribute_name] = preset.split(".")
                attributes.update(
                    {node_name: {attribute_name: presets[family][preset]}}
                )
                try:
                    attributes[node_name].update(
                        {attribute_name: presets[family][preset]}
                    )
                except KeyError:
                    attributes.update({
                        node_name: {
                            attribute_name: presets[family][preset]
                        }
                    })

        # Get invalid attributes.
        nodes = pm.ls()
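
The try/except merge above keeps earlier attributes for a node when several presets target it. `dict.setdefault` expresses the same behaviour without the exception round-trip; a sketch with illustrative preset data, not part of the commit:

# Same merge behaviour as the try/except above, sketched with setdefault.
attributes = {}
presets = {"model": {"pCube1.visibility": True, "pCube1.castsShadows": False}}

for family, family_presets in presets.items():
    for preset, value in family_presets.items():
        node_name, attribute_name = preset.split(".")
        attributes.setdefault(node_name, {})[attribute_name] = value

print(attributes)  # {'pCube1': {'visibility': True, 'castsShadows': False}}
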
@@ -1,108 +0,0 @@
# -*- coding: utf-8 -*-
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import
import pyblish.api
from pype.action import get_errored_instances_from_context
import pype.api


class SelectInvalidInstances(pyblish.api.Action):
    """Select invalid instances in Outliner."""

    label = "Show Instances"
    icon = "briefcase"
    on = "failed"

    def process(self, context, plugin):
        """Process invalid validators and select invalid instances."""
        try:
            from maya import cmds
        except ImportError:
            raise ImportError("Current host is not Maya")

        errored_instances = get_errored_instances_from_context(context)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes..")
        invalid = list()
        for _instance in instances:
            invalid_instances = plugin.get_invalid(context)
            if invalid_instances:
                if isinstance(invalid_instances, (list, tuple)):
                    invalid.extend(invalid_instances)
                else:
                    self.log.warning("Plug-in returned to be invalid, "
                                     "but has no selectable nodes.")

        # Ensure unique (process each node only once)
        invalid = list(set(invalid))

        if invalid:
            self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
            cmds.select(invalid, replace=True, noExpand=True)
        else:
            self.log.info("No invalid nodes found.")
            cmds.select(deselect=True)


class RepairSelectInvalidInstances(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        from maya import cmds
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        context_asset = context.data["assetEntity"]["name"]
        for instance in instances:
            cmds.setAttr(instance.data.get("name") + ".asset",
                         context_asset, type="string")


class ValidateInstanceInContext(pyblish.api.ContextPlugin):
    """Validator to check if instance asset match context asset.

    When working in per-shot style you always publish data in context of
    current asset (shot). This validator checks if this is so. It is optional
    so it can be disabled when needed.

    Action on this validator will select invalid instances in Outliner.
    """

    order = pype.api.ValidateContentsOrder
    label = "Instance in same Context"
    optional = True
    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]

    @classmethod
    def get_invalid(cls, context):
        """Get invalid instances."""
        invalid = []
        context_asset = context.data["assetEntity"]["name"]
        cls.log.info("we are in {}".format(context_asset))
        for instance in context:
            asset = instance.data.get("asset")
            if asset != context_asset:
                cls.log.warning("{} has asset {}".format(instance.name, asset))
                invalid.append(instance.name)

        return invalid

    def process(self, context):
        """Check instances."""
        invalid = self.get_invalid(context)
        if invalid:
            raise AssertionError("Some instances doesn't share same context")

@@ -106,7 +106,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):

            instance.data.update({
                "subset": subset,
                "asset": os.environ["AVALON_ASSET"],
                "asset": avalon_knob_data["asset"],
                "label": node.name(),
                "name": node.name(),
                "subset": subset,

@@ -1,4 +1,7 @@
import pyblish.api
import pype.api
from avalon import io, api

import nuke


@@ -23,6 +26,21 @@ class CollectReview(pyblish.api.InstancePlugin):
        if not node["review"].value():
            return

        # Add audio to instance if it exists.
        try:
            version = pype.api.get_latest_version(
                instance.context.data["assetEntity"]["name"], "audioMain"
            )
            representation = io.find_one(
                {"type": "representation", "parent": version["_id"]}
            )
            instance.data["audio"] = [{
                "offset": 0,
                "filename": api.get_representation_path(representation)
            }]
        except AssertionError:
            pass

        instance.data["families"].append("review")
        instance.data['families'].append('ftrack')
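
`get_latest_version` is the helper newly exposed through `pype.api` in this commit; from the call site above, it takes an asset name and a subset name and returns the latest version document, apparently raising AssertionError when nothing is found (which is why the block above swallows that exception). A hedged usage sketch with an illustrative asset name:

# Sketch only; signature and error behaviour inferred from the call site.
import pype.api
from avalon import io, api

version = pype.api.get_latest_version("sh010", "audioMain")
representation = io.find_one(
    {"type": "representation", "parent": version["_id"]}
)
print(api.get_representation_path(representation))
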
@@ -152,7 +152,7 @@ class ExtractThumbnail(pype.api.Extractor):

        ipn_orig = None
        for v in [n for n in nuke.allNodes()
                  if "Viewer" in n.Class()]:
                  if "Viewer" == n.Class()]:
            ip = v['input_process'].getValue()
            ipn = v['input_process_node'].getValue()
            if "VIEWER_INPUT" not in ipn and ip:

@@ -105,11 +105,10 @@ class IntentModel(QtGui.QStandardItemModel):

        intents_preset = (
            config.get_presets()
            .get("tools", {})
            .get("pyblish", {})
            .get("ui", {})
            .get("intents", {})
            .get("global", {})
            .get("intent", {})
        )

        default = intents_preset.get("default")
        items = intents_preset.get("items", {})
        if not items:

@@ -54,5 +54,10 @@
        "type": "module",
        "import_path": "pype.modules.adobe_communicator",
        "fromlist": ["pype", "modules"]
    }, {
        "title": "Websocket Server",
        "type": "module",
        "import_path": "pype.modules.websocket_server",
        "fromlist": ["pype", "modules"]
    }
]