diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
index afd20d12d1..29fdfe39ae 100644
--- a/pype/ftrack/actions/action_delivery.py
+++ b/pype/ftrack/actions/action_delivery.py
@@ -312,42 +312,32 @@ class Delivery(BaseAction):
anatomy_data = copy.deepcopy(repre["context"])
anatomy_data["root"] = location_path
- anatomy_filled = anatomy.format(anatomy_data)
- test_path = (
- anatomy_filled
- .get("delivery", {})
- .get(anatomy_name)
- )
+ anatomy_filled = anatomy.format_all(anatomy_data)
+ test_path = anatomy_filled["delivery"][anatomy_name]
- if not test_path:
+ if not test_path.solved:
msg = (
"Missing keys in Representation's context"
" for anatomy template \"{}\"."
).format(anatomy_name)
- all_anatomies = anatomy.format_all(anatomy_data)
- result = None
- for anatomies in all_anatomies.values():
- for key, temp in anatomies.get("delivery", {}).items():
- if key != anatomy_name:
- continue
+ if test_path.missing_keys:
+ keys = ", ".join(test_path.missing_keys)
+                    sub_msg = (
+                        "Representation: {}\n- Missing keys: \"{}\"\n"
+                    ).format(str(repre["_id"]), keys)
- result = temp
- break
+ if test_path.invalid_types:
+ items = []
+ for key, value in test_path.invalid_types.items():
+ items.append("\"{}\" {}".format(key, str(value)))
- # TODO log error! - missing keys in anatomy
- if result:
- missing_keys = [
- key[1] for key in string.Formatter().parse(result)
- if key[1] is not None
- ]
- else:
- missing_keys = ["unknown"]
+ keys = ", ".join(items)
+                    sub_msg = (
+                        "Representation: {}\n"
+                        "- Invalid value DataType: \"{}\"\n"
+                    ).format(str(repre["_id"]), keys)
- keys = ", ".join(missing_keys)
-                sub_msg = (
-                    "Representation: {}\n- Missing keys: \"{}\"\n"
-                ).format(str(repre["_id"]), keys)
self.report_items[msg].append(sub_msg)
self.log.warning(
"{} Representation: \"{}\" Filled: <{}>".format(
diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 23284a2ae6..8d25b5b801 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -31,7 +31,7 @@ class SyncToAvalonEvent(BaseEvent):
"timelog", "auth_userrole", "appointment"
]
ignore_ent_types = ["Milestone"]
- ignore_keys = ["statusid"]
+ ignore_keys = ["statusid", "thumbid"]
project_query = (
"select full_name, name, custom_attributes"
@@ -486,6 +486,14 @@ class SyncToAvalonEvent(BaseEvent):
action = ent_info["action"]
ftrack_id = ent_info["entityId"]
+ if isinstance(ftrack_id, list):
+ self.log.warning((
+ "BUG REPORT: Entity info has `entityId` as `list` \"{}\""
+ ).format(ent_info))
+ if len(ftrack_id) == 0:
+ continue
+ ftrack_id = ftrack_id[0]
+
if action == "move":
ent_keys = ent_info["keys"]
# Seprate update info from move action
@@ -1820,6 +1828,13 @@ class SyncToAvalonEvent(BaseEvent):
obj_type_id = ent_info["objectTypeId"]
ent_cust_attrs = cust_attrs_by_obj_id.get(obj_type_id)
+ if ent_cust_attrs is None:
+ self.log.warning((
+ "BUG REPORT: Entity has ent type without"
+ " custom attributes <{}> \"{}\""
+ ).format(entType, ent_info))
+ continue
+
for key, values in ent_info["changes"].items():
if key in hier_attrs_keys:
self.hier_cust_attrs_changes[key].append(ftrack_id)
diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py
index 87994d34b2..eaacfd959a 100644
--- a/pype/ftrack/events/event_user_assigment.py
+++ b/pype/ftrack/events/event_user_assigment.py
@@ -207,7 +207,9 @@ class UserAssigmentEvent(BaseEvent):
# formatting work dir is easiest part as we can use whole path
work_dir = anatomy.format(data)['avalon']['work']
# we also need publish but not whole
- publish = anatomy.format_all(data)['partial']['avalon']['publish']
+ filled_all = anatomy.format_all(data)
+ publish = filled_all['avalon']['publish']
+
# now find path to {asset}
m = re.search("(^.+?{})".format(data['asset']),
publish)
diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py
index c688693c77..8e217870ba 100644
--- a/pype/ftrack/ftrack_server/socket_thread.py
+++ b/pype/ftrack/ftrack_server/socket_thread.py
@@ -1,4 +1,5 @@
import os
+import sys
import time
import socket
import threading
@@ -52,8 +53,7 @@ class SocketThread(threading.Thread):
)
self.subproc = subprocess.Popen(
- ["python", self.filepath, "-port", str(self.port)],
- stdout=subprocess.PIPE
+ [sys.executable, self.filepath, "-port", str(self.port)]
)
# Listen for incoming connections
@@ -115,11 +115,6 @@ class SocketThread(threading.Thread):
if self.subproc.poll() is None:
self.subproc.terminate()
- lines = self.subproc.stdout.readlines()
- if lines:
- print("*** Socked Thread stdout ***")
- for line in lines:
- os.write(1, line)
self.finished = True
def get_data_from_con(self, connection):
diff --git a/pype/ftrack/ftrack_server/sub_user_server.py b/pype/ftrack/ftrack_server/sub_user_server.py
index 68066b33ce..f0d39447a8 100644
--- a/pype/ftrack/ftrack_server/sub_user_server.py
+++ b/pype/ftrack/ftrack_server/sub_user_server.py
@@ -2,12 +2,14 @@ import sys
import signal
import socket
+import traceback
+
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub
from pypeapp import Logger
-log = Logger().get_logger(__name__)
+log = Logger().get_logger("FtrackUserServer")
def main(args):
@@ -18,7 +20,9 @@ def main(args):
# Connect the socket to the port where the server is listening
server_address = ("localhost", port)
- log.debug("Storer connected to {} port {}".format(*server_address))
+ log.debug(
+ "User Ftrack Server connected to {} port {}".format(*server_address)
+ )
sock.connect(server_address)
sock.sendall(b"CreatedUser")
@@ -27,8 +31,10 @@ def main(args):
auto_connect_event_hub=True, sock=sock, Eventhub=UserEventHub
)
server = FtrackServer("action")
- log.debug("Launched Ftrack Event storer")
+ log.debug("Launched User Ftrack Server")
server.run_server(session=session)
+ except Exception:
+ traceback.print_exception(*sys.exc_info())
finally:
log.debug("Closing socket")
@@ -42,7 +48,6 @@ if __name__ == "__main__":
log.info(
"Process was forced to stop. Process ended."
)
- log.info("Process ended.")
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py
index 8cebd12a59..f08dc73c19 100644
--- a/pype/ftrack/lib/avalon_sync.py
+++ b/pype/ftrack/lib/avalon_sync.py
@@ -1722,7 +1722,11 @@ class SyncEntitiesFactory:
self.avalon_project_id = new_id
self._avalon_ents_by_id[str(new_id)] = project_item
+ if self._avalon_ents_by_ftrack_id is None:
+ self._avalon_ents_by_ftrack_id = {}
self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id)
+ if self._avalon_ents_by_name is None:
+ self._avalon_ents_by_name = {}
self._avalon_ents_by_name[project_item["name"]] = str(new_id)
self.create_list.append(project_item)
@@ -1991,7 +1995,7 @@ class SyncEntitiesFactory:
vis_par = ent["data"]["visualParent"]
if (
vis_par is not None and
- str(vis_par) in self.deleted_entities
+ str(vis_par) in _deleted_entities
):
continue
_ready.append(mongo_id)
diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py
index dab751c001..250872f239 100644
--- a/pype/ftrack/tray/ftrack_module.py
+++ b/pype/ftrack/tray/ftrack_module.py
@@ -171,7 +171,7 @@ class FtrackModule:
# If thread failed test Ftrack and Mongo connection
elif not self.thread_socket_server.isAlive():
- self.thread_socket_server_thread.join()
+ self.thread_socket_server.join()
self.thread_socket_server = None
ftrack_accessible = False
diff --git a/pype/maya/__init__.py b/pype/maya/__init__.py
index b4dbc52bc8..f027893a0e 100644
--- a/pype/maya/__init__.py
+++ b/pype/maya/__init__.py
@@ -162,6 +162,7 @@ def on_open(_):
# Validate FPS after update_task_from_path to
# ensure it is using correct FPS for the asset
lib.validate_fps()
+ lib.fix_incompatible_containers()
if any_outdated():
log.warning("Scene has outdated content.")
diff --git a/pype/maya/lib.py b/pype/maya/lib.py
index 0890d3863e..ec39b3556e 100644
--- a/pype/maya/lib.py
+++ b/pype/maya/lib.py
@@ -2318,6 +2318,25 @@ def get_attr_in_layer(attr, layer):
return cmds.getAttr(attr)
+def fix_incompatible_containers():
+    """Switch containers loaded with scene-format loaders to ReferenceLoader."""
+
+ host = avalon.api.registered_host()
+ for container in host.ls():
+ loader = container['loader']
+
+ print(container['loader'])
+
+ if loader in ["MayaAsciiLoader",
+ "AbcLoader",
+ "ModelLoader",
+ "CameraLoader",
+ "RigLoader",
+ "FBXLoader"]:
+ cmds.setAttr(container["objectName"] + ".loader",
+ "ReferenceLoader", type="string")
+
+
def _null(*args):
pass
diff --git a/pype/maya/menu.py b/pype/maya/menu.py
index 5254337f03..806944c117 100644
--- a/pype/maya/menu.py
+++ b/pype/maya/menu.py
@@ -15,12 +15,13 @@ log = logging.getLogger(__name__)
def _get_menu():
"""Return the menu instance if it currently exists in Maya"""
- app = QtWidgets.QApplication.instance()
- widgets = dict((w.objectName(), w) for w in app.allWidgets())
+ widgets = dict((
+ w.objectName(), w) for w in QtWidgets.QApplication.allWidgets())
menu = widgets.get(self._menu)
return menu
+
def deferred():
log.info("Attempting to install scripts menu..")
diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py
index 141cf4c13d..dfd61f4b39 100644
--- a/pype/nuke/__init__.py
+++ b/pype/nuke/__init__.py
@@ -33,40 +33,41 @@ if os.getenv("PYBLISH_GUI", None):
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))
-class NukeHandler(logging.Handler):
- '''
- Nuke Handler - emits logs into nuke's script editor.
- warning will emit nuke.warning()
- critical and fatal would popup msg dialog to alert of the error.
- '''
+# class NukeHandler(logging.Handler):
+# '''
+# Nuke Handler - emits logs into nuke's script editor.
+# warning will emit nuke.warning()
+# critical and fatal would popup msg dialog to alert of the error.
+# '''
+#
+# def __init__(self):
+# logging.Handler.__init__(self)
+# self.set_name("Pype_Nuke_Handler")
+#
+# def emit(self, record):
+# # Formated message:
+# msg = self.format(record)
+#
+# if record.levelname.lower() in [
+# # "warning",
+# "critical",
+# "fatal",
+# "error"
+# ]:
+# msg = self.format(record)
+# nuke.message(msg)
+#
+#
+# '''Adding Nuke Logging Handler'''
+# log.info([handler.get_name() for handler in logging.root.handlers[:]])
+# nuke_handler = NukeHandler()
+# if nuke_handler.get_name() \
+# not in [handler.get_name()
+# for handler in logging.root.handlers[:]]:
+# logging.getLogger().addHandler(nuke_handler)
+# logging.getLogger().setLevel(logging.INFO)
+# log.info([handler.get_name() for handler in logging.root.handlers[:]])
- def __init__(self):
- logging.Handler.__init__(self)
- self.set_name("Pype_Nuke_Handler")
-
- def emit(self, record):
- # Formated message:
- msg = self.format(record)
-
- if record.levelname.lower() in [
- # "warning",
- "critical",
- "fatal",
- "error"
- ]:
- msg = self.format(record)
- nuke.message(msg)
-
-
-'''Adding Nuke Logging Handler'''
-log.info([handler.get_name() for handler in logging.root.handlers[:]])
-nuke_handler = NukeHandler()
-if nuke_handler.get_name() \
- not in [handler.get_name()
- for handler in logging.root.handlers[:]]:
- logging.getLogger().addHandler(nuke_handler)
- logging.getLogger().setLevel(logging.INFO)
-log.info([handler.get_name() for handler in logging.root.handlers[:]])
def reload_config():
"""Attempt to reload pipeline at run-time.
@@ -113,7 +114,7 @@ def install():
family_states = [
"write",
"review",
- "nukenodes"
+        "nukenodes",
"gizmo"
]
diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 7aa0395da5..db1a5919c3 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -21,7 +21,6 @@ from .presets import (
from .presets import (
get_anatomy
)
-# TODO: remove get_anatomy and import directly Anatomy() here
from pypeapp import Logger
log = Logger().get_logger(__name__, "nuke")
@@ -50,8 +49,6 @@ def checkInventoryVersions():
and check if the node is having actual version. If not then it will color
it to red.
"""
- # TODO: make it for all nodes not just Read (Loader
-
# get all Loader nodes by avalon attribute metadata
for each in nuke.allNodes():
if each.Class() == 'Read':
@@ -93,7 +90,6 @@ def checkInventoryVersions():
def writes_version_sync():
''' Callback synchronizing version of publishable write nodes
'''
- # TODO: make it work with new write node group
try:
rootVersion = pype.get_version_from_path(nuke.root().name())
padding = len(rootVersion)
@@ -130,7 +126,8 @@ def writes_version_sync():
os.makedirs(os.path.dirname(node_new_file), 0o766)
except Exception as e:
log.warning(
- "Write node: `{}` has no version in path: {}".format(each.name(), e))
+ "Write node: `{}` has no version in path: {}".format(
+ each.name(), e))
def version_up_script():
@@ -183,9 +180,12 @@ def format_anatomy(data):
try:
padding = int(anatomy.templates['render']['padding'])
except KeyError as e:
- log.error("`padding` key is not in `render` "
- "Anatomy template. Please, add it there and restart "
- "the pipeline (padding: \"4\"): `{}`".format(e))
+ msg = ("`padding` key is not in `render` "
+ "Anatomy template. Please, add it there and restart "
+ "the pipeline (padding: \"4\"): `{}`").format(e)
+
+ log.error(msg)
+ nuke.message(msg)
version = data.get("version", None)
if not version:
@@ -265,7 +265,9 @@ def create_write_node(name, data, input=None, prenodes=None):
anatomy_filled = format_anatomy(data)
except Exception as e:
- log.error("problem with resolving anatomy tepmlate: {}".format(e))
+ msg = "problem with resolving anatomy tepmlate: {}".format(e)
+ log.error(msg)
+ nuke.message(msg)
# build file path to workfiles
fpath = str(anatomy_filled["work"]["folder"]).replace("\\", "/")
@@ -543,8 +545,11 @@ class WorkfileSettings(object):
viewer_dict (dict): adjustments from presets
'''
- assert isinstance(viewer_dict, dict), log.error(
- "set_viewers_colorspace(): argument should be dictionary")
+ if not isinstance(viewer_dict, dict):
+ msg = "set_viewers_colorspace(): argument should be dictionary"
+ log.error(msg)
+ nuke.message(msg)
+ return
filter_knobs = [
"viewerProcess",
@@ -592,8 +597,10 @@ class WorkfileSettings(object):
root_dict (dict): adjustmensts from presets
'''
- assert isinstance(root_dict, dict), log.error(
- "set_root_colorspace(): argument should be dictionary")
+ if not isinstance(root_dict, dict):
+ msg = "set_root_colorspace(): argument should be dictionary"
+ log.error(msg)
+ nuke.message(msg)
log.debug(">> root_dict: {}".format(root_dict))
@@ -640,8 +647,11 @@ class WorkfileSettings(object):
'''
# TODO: complete this function so any write node in
# scene will have fixed colorspace following presets for the project
- assert isinstance(write_dict, dict), log.error(
- "set_root_colorspace(): argument should be dictionary")
+ if not isinstance(write_dict, dict):
+ msg = "set_root_colorspace(): argument should be dictionary"
+ nuke.message(msg)
+ log.error(msg)
+ return
log.debug("__ set_writes_colorspace(): {}".format(write_dict))
@@ -653,25 +663,28 @@ class WorkfileSettings(object):
try:
self.set_root_colorspace(nuke_colorspace["root"])
except AttributeError:
- log.error(
- "set_colorspace(): missing `root` settings in template")
+            msg = "set_colorspace(): missing `root` settings in template"
+            log.error(msg)
try:
self.set_viewers_colorspace(nuke_colorspace["viewer"])
except AttributeError:
- log.error(
- "set_colorspace(): missing `viewer` settings in template")
+ msg = "set_colorspace(): missing `viewer` settings in template"
+ nuke.message(msg)
+ log.error(msg)
try:
self.set_writes_colorspace(nuke_colorspace["write"])
except AttributeError:
- log.error(
- "set_colorspace(): missing `write` settings in template")
+ msg = "set_colorspace(): missing `write` settings in template"
+ nuke.message(msg)
+ log.error(msg)
try:
for key in nuke_colorspace:
log.debug("Preset's colorspace key: {}".format(key))
except TypeError:
- log.error("Nuke is not in templates! \n\n\n"
- "contact your supervisor!")
+ msg = "Nuke is not in templates! Contact your supervisor!"
+ nuke.message(msg)
+ log.error(msg)
def reset_frame_range_handles(self):
"""Set frame range to current asset"""
@@ -758,13 +771,13 @@ class WorkfileSettings(object):
}
if any(x for x in data.values() if x is None):
- log.error(
- "Missing set shot attributes in DB."
- "\nContact your supervisor!."
- "\n\nWidth: `{width}`"
- "\nHeight: `{height}`"
- "\nPixel Asspect: `{pixel_aspect}`".format(**data)
- )
+ msg = ("Missing set shot attributes in DB."
+ "\nContact your supervisor!."
+ "\n\nWidth: `{width}`"
+ "\nHeight: `{height}`"
+ "\nPixel Asspect: `{pixel_aspect}`").format(**data)
+ log.error(msg)
+ nuke.message(msg)
bbox = self._asset_entity.get('data', {}).get('crop')
@@ -781,10 +794,10 @@ class WorkfileSettings(object):
)
except Exception as e:
bbox = None
- log.error(
- "{}: {} \nFormat:Crop need to be set with dots, example: "
- "0.0.1920.1080, /nSetting to default".format(__name__, e)
- )
+ msg = ("{}:{} \nFormat:Crop need to be set with dots, example: "
+ "0.0.1920.1080, /nSetting to default").format(__name__, e)
+ log.error(msg)
+ nuke.message(msg)
existing_format = None
for format in nuke.formats():
diff --git a/pype/nuke/presets.py b/pype/nuke/presets.py
index e0c12e2671..a413ccc878 100644
--- a/pype/nuke/presets.py
+++ b/pype/nuke/presets.py
@@ -1,6 +1,6 @@
from pype import api as pype
from pypeapp import Anatomy, config
-
+import nuke
log = pype.Logger().get_logger(__name__, "nuke")
@@ -28,7 +28,7 @@ def get_node_dataflow_preset(**kwarg):
families = kwarg.get("families", [])
preset = kwarg.get("preset", None) # omit < 2.0.0v
- assert any([host, cls]), log.error(
+ assert any([host, cls]), nuke.message(
"`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__))
nuke_dataflow = get_dataflow_preset().get(str(host), None)
@@ -56,8 +56,10 @@ def get_node_colorspace_preset(**kwarg):
families = kwarg.get("families", [])
preset = kwarg.get("preset", None) # omit < 2.0.0v
- assert any([host, cls]), log.error(
- "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__))
+ if not any([host, cls]):
+ msg = "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)
+ log.error(msg)
+ nuke.message(msg)
nuke_colorspace = get_colorspace_preset().get(str(host), None)
nuke_colorspace_node = nuke_colorspace.get(str(cls), None)
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py b/pype/plugins/ftrack/_unused_publish/integrate_ftrack_comments.py
similarity index 94%
rename from pype/plugins/ftrack/publish/integrate_ftrack_comments.py
rename to pype/plugins/ftrack/_unused_publish/integrate_ftrack_comments.py
index 4f7afb4346..4be9f7fc3a 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py
+++ b/pype/plugins/ftrack/_unused_publish/integrate_ftrack_comments.py
@@ -7,7 +7,7 @@ class IntegrateFtrackComments(pyblish.api.InstancePlugin):
"""Create comments in Ftrack."""
order = pyblish.api.IntegratorOrder
- label = "Integrate Comments to Ftrack."
+ label = "Integrate Comments to Ftrack"
families = ["shot"]
enabled = False
diff --git a/pype/plugins/ftrack/publish/collect_ftrack_api.py b/pype/plugins/ftrack/publish/collect_ftrack_api.py
index d09baec676..f79d74453b 100644
--- a/pype/plugins/ftrack/publish/collect_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/collect_ftrack_api.py
@@ -23,25 +23,43 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
# Collect session
session = ftrack_api.Session()
+ self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
context.data["ftrackSession"] = session
# Collect task
- project = os.environ.get('AVALON_PROJECT', '')
- asset = os.environ.get('AVALON_ASSET', '')
- task = os.environ.get('AVALON_TASK', None)
- self.log.debug(task)
+ project_name = os.environ.get('AVALON_PROJECT', '')
+ asset_name = os.environ.get('AVALON_ASSET', '')
+ task_name = os.environ.get('AVALON_TASK', None)
+
+ # Find project entity
+ project_query = 'Project where full_name is "{0}"'.format(project_name)
+ self.log.debug("Project query: < {0} >".format(project_query))
+ project_entity = session.query(project_query).one()
+ self.log.debug("Project found: {0}".format(project_entity))
+
+ # Find asset entity
+ entity_query = (
+ 'TypedContext where project_id is "{0}"'
+ ' and name is "{1}"'
+ ).format(project_entity["id"], asset_name)
+ self.log.debug("Asset entity query: < {0} >".format(entity_query))
+ asset_entity = session.query(entity_query).one()
+ self.log.debug("Asset found: {0}".format(asset_entity))
+
+ # Find task entity if task is set
+ if task_name:
+ task_query = (
+ 'Task where name is "{0}" and parent_id is "{1}"'
+ ).format(task_name, asset_entity["id"])
+ self.log.debug("Task entity query: < {0} >".format(task_query))
+ task_entity = session.query(task_query).one()
+ self.log.debug("Task entity found: {0}".format(task_entity))
- if task:
- result = session.query('Task where\
- project.full_name is "{0}" and\
- name is "{1}" and\
- parent.name is "{2}"'.format(project, task, asset)).one()
- context.data["ftrackTask"] = result
else:
- result = session.query('TypedContext where\
- project.full_name is "{0}" and\
- name is "{1}"'.format(project, asset)).one()
- context.data["ftrackEntity"] = result
+ task_entity = None
+ self.log.warning("Task name is not set.")
- self.log.info(result)
+        context.data["ftrackProject"] = project_entity
+ context.data["ftrackEntity"] = asset_entity
+ context.data["ftrackTask"] = task_entity
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
index c51685f84d..cd94b2a150 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
@@ -77,6 +77,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
info_msg = "Created new {entity_type} with data: {data}"
info_msg += ", metadata: {metadata}."
+ used_asset_versions = []
# Iterate over components and publish
for data in instance.data.get("ftrackComponentsList", []):
@@ -148,6 +149,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
assetversion_cust_attrs = _assetversion_data.pop(
"custom_attributes", {}
)
+ asset_version_comment = _assetversion_data.pop(
+ "comment", None
+ )
assetversion_data.update(_assetversion_data)
assetversion_entity = session.query(
@@ -185,6 +189,20 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
existing_assetversion_metadata.update(assetversion_metadata)
assetversion_entity["metadata"] = existing_assetversion_metadata
+ # Add comment
+ if asset_version_comment:
+ assetversion_entity["comment"] = asset_version_comment
+ try:
+ session.commit()
+ except Exception:
+ session.rollback()
+ self.log.warning((
+                    "Comment was not possible to set for AssetVersion "
+                    "\"{0}\". Can't set its value to: \"{1}\""
+ ).format(
+ assetversion_entity["id"], str(asset_version_comment)
+ ))
+
# Adding Custom Attributes
for attr, val in assetversion_cust_attrs.items():
if attr in assetversion_entity["custom_attributes"]:
@@ -369,3 +387,14 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)
+
+ if assetversion_entity not in used_asset_versions:
+ used_asset_versions.append(assetversion_entity)
+
+ asset_versions_key = "ftrackIntegratedAssetVersions"
+ if asset_versions_key not in instance.data:
+ instance.data[asset_versions_key] = []
+
+ for asset_version in used_asset_versions:
+ if asset_version not in instance.data[asset_versions_key]:
+ instance.data[asset_versions_key].append(asset_version)
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
new file mode 100644
index 0000000000..f7fb5addbb
--- /dev/null
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -0,0 +1,51 @@
+import sys
+import pyblish.api
+import six
+
+
+class IntegrateFtrackNote(pyblish.api.InstancePlugin):
+ """Create comments in Ftrack."""
+
+ # Must be after integrate asset new
+ order = pyblish.api.IntegratorOrder + 0.4999
+ label = "Integrate Ftrack note"
+ families = ["ftrack"]
+ optional = True
+
+ def process(self, instance):
+ comment = (instance.context.data.get("comment") or "").strip()
+ if not comment:
+ self.log.info("Comment is not set.")
+ return
+
+ self.log.debug("Comment is set to {}".format(comment))
+
+ asset_versions_key = "ftrackIntegratedAssetVersions"
+ asset_versions = instance.data.get(asset_versions_key)
+ if not asset_versions:
+            self.log.info("There are no integrated AssetVersions")
+ return
+
+ session = instance.context.data["ftrackSession"]
+ user = session.query(
+ "User where username is \"{}\"".format(session.api_user)
+ ).first()
+ if not user:
+ self.log.warning(
+ "Was not able to query current User {}".format(
+ session.api_user
+ )
+ )
+
+ for asset_version in asset_versions:
+ asset_version.create_note(comment, author=user)
+
+ try:
+ session.commit()
+ self.log.debug("Note added to AssetVersion \"{}\"".format(
+ str(asset_version)
+ ))
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ session.rollback()
+ six.reraise(tp, value, tb)
diff --git a/pype/plugins/ftrack/publish/integrate_remove_components.py b/pype/plugins/ftrack/publish/integrate_remove_components.py
index bad50f7200..26cac0f1ae 100644
--- a/pype/plugins/ftrack/publish/integrate_remove_components.py
+++ b/pype/plugins/ftrack/publish/integrate_remove_components.py
@@ -11,13 +11,13 @@ class IntegrateCleanComponentData(pyblish.api.InstancePlugin):
label = 'Clean component data'
families = ["ftrack"]
optional = True
- active = True
+ active = False
def process(self, instance):
for comp in instance.data['representations']:
self.log.debug('component {}'.format(comp))
-
+
if "%" in comp['published_path'] or "#" in comp['published_path']:
continue
diff --git a/pype/plugins/global/publish/collect_comment.py b/pype/plugins/global/publish/collect_comment.py
index 22970665a1..062142ace9 100644
--- a/pype/plugins/global/publish/collect_comment.py
+++ b/pype/plugins/global/publish/collect_comment.py
@@ -15,4 +15,5 @@ class CollectComment(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder
def process(self, context):
- context.data["comment"] = ""
+ comment = (context.data.get("comment") or "").strip()
+ context.data["comment"] = comment
diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index 9aa96b0e33..6c06229304 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -101,6 +101,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
lut_path = None
slate_frame = None
families_data = None
+ baked_mov_path = None
subset = None
version = None
frame_start = 0
@@ -427,6 +428,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
"name": ext,
"ext": "{}".format(ext),
"files": list(collection),
+ "frameStart": start,
+ "frameEnd": end,
"stagingDir": root,
"anatomy_template": "render",
"fps": fps,
diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 26f6d34e91..508c7f1d83 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -107,9 +107,9 @@ class ExtractBurnin(pype.api.Extractor):
# create copy of prep_data for anatomy formatting
_prep_data = copy.deepcopy(prep_data)
_prep_data["representation"] = repre["name"]
- _prep_data["anatomy"] = (
- anatomy.format_all(_prep_data).get("solved") or {}
- )
+ filled_anatomy = anatomy.format_all(_prep_data)
+ _prep_data["anatomy"] = filled_anatomy.get_solved()
+
burnin_data = {
"input": full_movie_path.replace("\\", "/"),
"codec": repre.get("codec", []),
diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py
index 00e8a6fedf..4978649ba2 100644
--- a/pype/plugins/global/publish/extract_jpeg.py
+++ b/pype/plugins/global/publish/extract_jpeg.py
@@ -6,7 +6,7 @@ import pype.api
class ExtractJpegEXR(pyblish.api.InstancePlugin):
- """Resolve any dependency issies
+ """Resolve any dependency issues
This plug-in resolves any paths which, if not updated might break
the published file.
@@ -55,8 +55,8 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
filename = os.path.splitext(input_file)[0]
if not filename.endswith('.'):
filename += "."
- jpegFile = filename + "jpg"
- full_output_path = os.path.join(stagingdir, jpegFile)
+ jpeg_file = filename + "jpg"
+ full_output_path = os.path.join(stagingdir, jpeg_file)
self.log.info("output {}".format(full_output_path))
@@ -87,9 +87,9 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
instance.data["representations"] = []
representation = {
- 'name': 'jpg',
+ 'name': 'thumbnail',
'ext': 'jpg',
- 'files': jpegFile,
+ 'files': jpeg_file,
"stagingDir": stagingdir,
"thumbnail": True,
"tags": ['thumbnail']
diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py
index e24bad362d..87b9e1a9bd 100644
--- a/pype/plugins/global/publish/integrate.py
+++ b/pype/plugins/global/publish/integrate.py
@@ -24,7 +24,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
label = "Integrate Asset"
order = pyblish.api.IntegratorOrder
- families = ["assembly"]
+ families = []
exclude_families = ["clip"]
def process(self, instance):
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 1be712c14a..9729716a50 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -76,6 +76,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"source",
"matchmove",
"image"
+ "source",
+ "assembly"
]
exclude_families = ["clip"]
@@ -326,8 +328,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
index_frame_start = None
if repre.get("frameStart"):
- frame_start_padding = len(str(
- repre.get("frameEnd")))
+                    frame_start_padding = int(anatomy.templates["render"]["padding"])
index_frame_start = int(repre.get("frameStart"))
# exception for slate workflow
@@ -402,6 +403,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("__ dst: {}".format(dst))
representation = {
+ "_id": io.ObjectId(),
"schema": "pype:representation-2.0",
"type": "representation",
"parent": version_id,
@@ -444,6 +446,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("__ represNAME: {}".format(rep['name']))
self.log.debug("__ represPATH: {}".format(rep['published_path']))
io.insert_many(representations)
+ instance.data["published_representations"] = representations
# self.log.debug("Representation: {}".format(representations))
self.log.info("Registered {} items".format(len(representations)))
diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py
new file mode 100644
index 0000000000..1c4399b386
--- /dev/null
+++ b/pype/plugins/global/publish/integrate_thumbnail.py
@@ -0,0 +1,139 @@
+import os
+import sys
+import errno
+import shutil
+import copy
+
+import six
+import pyblish.api
+from bson.objectid import ObjectId
+
+from avalon import api, io
+
+
+class IntegrateThumbnails(pyblish.api.InstancePlugin):
+ """Integrate Thumbnails."""
+
+ label = "Integrate Thumbnails"
+ order = pyblish.api.IntegratorOrder + 0.01
+ families = ["review"]
+
+ def process(self, instance):
+
+ if not os.environ.get("AVALON_THUMBNAIL_ROOT"):
+ self.log.info("AVALON_THUMBNAIL_ROOT is not set."
+ " Skipping thumbnail integration.")
+ return
+
+ published_repres = instance.data.get("published_representations")
+ if not published_repres:
+ self.log.debug(
+ "There are not published representation ids on the instance."
+ )
+ return
+
+ project_name = api.Session["AVALON_PROJECT"]
+
+ anatomy = instance.context.data["anatomy"]
+ if "publish" not in anatomy.templates:
+ raise AssertionError("Anatomy does not have set publish key!")
+
+ if "thumbnail" not in anatomy.templates["publish"]:
+ raise AssertionError((
+ "There is not set \"thumbnail\" template for project \"{}\""
+ ).format(project_name))
+
+ thumbnail_template = anatomy.templates["publish"]["thumbnail"]
+
+ io.install()
+
+ thumb_repre = None
+ for repre in published_repres:
+ if repre["name"].lower() == "thumbnail":
+ thumb_repre = repre
+ break
+
+ if not thumb_repre:
+ self.log.debug(
+ "There is not representation with name \"thumbnail\""
+ )
+ return
+
+ version = io.find_one({"_id": thumb_repre["parent"]})
+ if not version:
+ raise AssertionError(
+ "There does not exist version with id {}".format(
+ str(thumb_repre["parent"])
+ )
+ )
+
+ # Get full path to thumbnail file from representation
+ src_full_path = os.path.normpath(thumb_repre["data"]["path"])
+ if not os.path.exists(src_full_path):
+ self.log.warning("Thumbnail file was not found. Path: {}".format(
+ src_full_path
+ ))
+ return
+
+ filename, file_extension = os.path.splitext(src_full_path)
+ # Create id for mongo entity now to fill anatomy template
+ thumbnail_id = ObjectId()
+
+ # Prepare anatomy template fill data
+ template_data = copy.deepcopy(thumb_repre["context"])
+ template_data.update({
+ "_id": str(thumbnail_id),
+ "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"),
+ "ext": file_extension,
+ "thumbnail_type": "thumbnail"
+ })
+
+ anatomy_filled = anatomy.format(template_data)
+ final_path = anatomy_filled.get("publish", {}).get("thumbnail")
+ if not final_path:
+ raise AssertionError((
+ "Anatomy template was not filled with entered data"
+ "\nTemplate: {} "
+ "\nData: {}"
+ ).format(thumbnail_template, str(template_data)))
+
+ dst_full_path = os.path.normpath(final_path)
+ self.log.debug(
+ "Copying file .. {} -> {}".format(src_full_path, dst_full_path)
+ )
+ dirname = os.path.dirname(dst_full_path)
+ try:
+ os.makedirs(dirname)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ tp, value, tb = sys.exc_info()
+ six.reraise(tp, value, tb)
+
+ shutil.copy(src_full_path, dst_full_path)
+
+ # Clean template data from keys that are dynamic
+ template_data.pop("_id")
+ template_data.pop("thumbnail_root")
+
+ thumbnail_entity = {
+ "_id": thumbnail_id,
+ "type": "thumbnail",
+ "schema": "pype:thumbnail-1.0",
+ "data": {
+ "template": thumbnail_template,
+ "template_data": template_data
+ }
+ }
+ # Create thumbnail entity
+ io.insert_one(thumbnail_entity)
+ self.log.debug(
+ "Creating entity in database {}".format(str(thumbnail_entity))
+ )
+ # Set thumbnail id for version
+ io.update_many(
+ {"_id": version["_id"]},
+ {"$set": {"data.thumbnail_id": thumbnail_id}}
+ )
+ self.log.debug("Setting thumbnail for version \"{}\" <{}>".format(
+ version["name"], str(version["_id"])
+ ))
diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index c01cb379d4..a9fa8febd4 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -162,6 +162,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"FTRACK_API_KEY",
"FTRACK_SERVER",
"PYPE_ROOT",
+ "PYPE_METADATA_FILE",
"PYPE_STUDIO_PROJECTS_PATH",
"PYPE_STUDIO_PROJECTS_MOUNT"
]
@@ -185,7 +186,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
metadata_path = os.path.normpath(metadata_path)
mount_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_MOUNT'])
- network_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_PATH'])
+ network_root = os.path.normpath(
+ os.environ['PYPE_STUDIO_PROJECTS_PATH'])
metadata_path = metadata_path.replace(mount_root, network_root)
@@ -204,7 +206,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"PluginInfo": {
"Version": "3.6",
"ScriptFile": _get_script(),
- "Arguments": '--paths "{}"'.format(metadata_path),
+ "Arguments": "",
"SingleFrameOnly": "True"
},
@@ -216,7 +218,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# job so they use the same environment
environment = job["Props"].get("Env", {})
-
+ environment["PYPE_METADATA_FILE"] = metadata_path
i = 0
for index, key in enumerate(environment):
self.log.info("KEY: {}".format(key))
@@ -254,6 +256,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"""
# Get a submission job
data = instance.data.copy()
+ if hasattr(instance, "_log"):
+ data['_log'] = instance._log
render_job = data.pop("deadlineSubmissionJob", None)
submission_type = "deadline"
diff --git a/pype/plugins/maya/load/actions.py b/pype/plugins/maya/load/actions.py
index 9f6a5c4d34..77d18b0ee3 100644
--- a/pype/plugins/maya/load/actions.py
+++ b/pype/plugins/maya/load/actions.py
@@ -140,9 +140,9 @@ class ImportMayaLoader(api.Loader):
message = "Are you sure you want import this"
state = QtWidgets.QMessageBox.warning(None,
- "Are you sure?",
- message,
- buttons=buttons,
- defaultButton=accept)
+ "Are you sure?",
+ message,
+ buttons=buttons,
+ defaultButton=accept)
return state == accept
diff --git a/pype/plugins/maya/load/load_camera.py b/pype/plugins/maya/load/load_camera.py
deleted file mode 100644
index e9bf265b98..0000000000
--- a/pype/plugins/maya/load/load_camera.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import pype.maya.plugin
-import os
-from pypeapp import config
-
-
-class CameraLoader(pype.maya.plugin.ReferenceLoader):
- """Specific loader of Alembic for the pype.camera family"""
-
- families = ["camera"]
- label = "Reference camera"
- representations = ["abc", "ma"]
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def process_reference(self, context, name, namespace, data):
-
- import maya.cmds as cmds
- # Get family type from the context
-
- try:
- family = context["representation"]["context"]["family"]
- except ValueError:
- family = "camera"
-
- cmds.loadPlugin("AbcImport.mll", quiet=True)
- groupName = "{}:{}".format(namespace, name)
- nodes = cmds.file(self.fname,
- namespace=namespace,
- sharedReferenceFile=False,
- groupReference=True,
- groupName="{}:{}".format(namespace, name),
- reference=True,
- returnNewNodes=True)
-
- cameras = cmds.ls(nodes, type="camera")
-
- presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
- colors = presets['plugins']['maya']['load']['colors']
-
- c = colors.get(family)
- if c is not None:
- cmds.setAttr(groupName + ".useOutlinerColor", 1)
- cmds.setAttr(groupName + ".outlinerColor",
- c[0], c[1], c[2])
-
- # Check the Maya version, lockTransform has been introduced since
- # Maya 2016.5 Ext 2
- version = int(cmds.about(version=True))
- if version >= 2016:
- for camera in cameras:
- cmds.camera(camera, edit=True, lockTransform=True)
- else:
- self.log.warning("This version of Maya does not support locking of"
- " transforms of cameras.")
-
- self[:] = nodes
-
- return nodes
-
- def switch(self, container, representation):
- self.update(container, representation)
diff --git a/pype/plugins/maya/load/load_fbx.py b/pype/plugins/maya/load/load_fbx.py
deleted file mode 100644
index 14df300c3c..0000000000
--- a/pype/plugins/maya/load/load_fbx.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import pype.maya.plugin
-import os
-from pypeapp import config
-
-
-class FBXLoader(pype.maya.plugin.ReferenceLoader):
- """Load the FBX"""
-
- families = ["fbx"]
- representations = ["fbx"]
-
- label = "Reference FBX"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def process_reference(self, context, name, namespace, data):
-
- import maya.cmds as cmds
- from avalon import maya
-
- try:
- family = context["representation"]["context"]["family"]
- except ValueError:
- family = "fbx"
-
- # Ensure FBX plug-in is loaded
- cmds.loadPlugin("fbxmaya", quiet=True)
-
- with maya.maintained_selection():
- nodes = cmds.file(self.fname,
- namespace=namespace,
- reference=True,
- returnNewNodes=True,
- groupReference=True,
- groupName="{}:{}".format(namespace, name))
-
- groupName = "{}:{}".format(namespace, name)
-
- presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
- colors = presets['plugins']['maya']['load']['colors']
-
- c = colors.get(family)
- if c is not None:
- cmds.setAttr(groupName + ".useOutlinerColor", 1)
- cmds.setAttr(groupName + ".outlinerColor",
- c[0], c[1], c[2])
-
- self[:] = nodes
-
- return nodes
-
- def switch(self, container, representation):
- self.update(container, representation)
diff --git a/pype/plugins/maya/load/load_mayaascii.py b/pype/plugins/maya/load/load_mayaascii.py
deleted file mode 100644
index b9a5de2782..0000000000
--- a/pype/plugins/maya/load/load_mayaascii.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import pype.maya.plugin
-from pypeapp import config
-import os
-
-
-class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
- """Load the model"""
-
- families = ["mayaAscii",
- "setdress",
- "layout"]
- representations = ["ma"]
-
- label = "Reference Maya Ascii"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def process_reference(self, context, name, namespace, data):
-
- import maya.cmds as cmds
- from avalon import maya
-
- try:
- family = context["representation"]["context"]["family"]
- except ValueError:
- family = "model"
-
- with maya.maintained_selection():
- nodes = cmds.file(self.fname,
- namespace=namespace,
- reference=True,
- returnNewNodes=True,
- groupReference=True,
- groupName="{}:{}".format(namespace, name))
-
- self[:] = nodes
- groupName = "{}:{}".format(namespace, name)
-
- presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
- colors = presets['plugins']['maya']['load']['colors']
-
- c = colors.get(family)
- if c is not None:
- cmds.setAttr(groupName + ".useOutlinerColor", 1)
- cmds.setAttr(groupName + ".outlinerColor",
- c[0], c[1], c[2])
- cmds.setAttr(groupName + ".displayHandle", 1)
- # get bounding box
- bbox = cmds.exactWorldBoundingBox(groupName)
- # get pivot position on world space
- pivot = cmds.xform(groupName, q=True, sp=True, ws=True)
- # center of bounding box
- cx = (bbox[0] + bbox[3]) / 2
- cy = (bbox[1] + bbox[4]) / 2
- cz = (bbox[2] + bbox[5]) / 2
- # add pivot position to calculate offset
- cx = cx + pivot[0]
- cy = cy + pivot[1]
- cz = cz + pivot[2]
- # set selection handle offset to center of bounding box
- cmds.setAttr(groupName + ".selectHandleX", cx)
- cmds.setAttr(groupName + ".selectHandleY", cy)
- cmds.setAttr(groupName + ".selectHandleZ", cz)
- return nodes
-
- def switch(self, container, representation):
- self.update(container, representation)
diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py
index 376fcc2c01..cbd1da7cbd 100644
--- a/pype/plugins/maya/load/load_reference.py
+++ b/pype/plugins/maya/load/load_reference.py
@@ -1,4 +1,6 @@
import pype.maya.plugin
+from avalon import api, maya
+from maya import cmds
import os
from pypeapp import config
@@ -6,8 +8,15 @@ from pypeapp import config
class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
"""Load the model"""
- families = ["model", "pointcache", "animation"]
- representations = ["ma", "abc"]
+ families = ["model",
+ "pointcache",
+ "animation",
+ "mayaAscii",
+ "setdress",
+ "layout",
+ "camera",
+ "rig"]
+ representations = ["ma", "abc", "fbx"]
tool_names = ["loader"]
label = "Reference"
@@ -37,27 +46,29 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
reference=True,
returnNewNodes=True)
- namespace = cmds.referenceQuery(nodes[0], namespace=True)
+ # namespace = cmds.referenceQuery(nodes[0], namespace=True)
shapes = cmds.ls(nodes, shapes=True, long=True)
- print(shapes)
newNodes = (list(set(nodes) - set(shapes)))
- print(newNodes)
+
+ current_namespace = pm.namespaceInfo(currentNamespace=True)
+
+ if current_namespace != ":":
+ groupName = current_namespace + ":" + groupName
groupNode = pm.PyNode(groupName)
roots = set()
- print(nodes)
for node in newNodes:
try:
roots.add(pm.PyNode(node).getAllParents()[-2])
- except:
+ except: # noqa: E722
pass
for root in roots:
root.setParent(world=True)
- groupNode.root().zeroTransformPivots()
+ groupNode.zeroTransformPivots()
for root in roots:
root.setParent(groupNode)
@@ -90,23 +101,39 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
cmds.setAttr(groupName + ".selectHandleY", cy)
cmds.setAttr(groupName + ".selectHandleZ", cz)
+ if data.get("post_process", True):
+ if family == "rig":
+ self._post_process_rig(name, namespace, context, data)
+
return newNodes
def switch(self, container, representation):
self.update(container, representation)
+ def _post_process_rig(self, name, namespace, context, data):
-# for backwards compatibility
-class AbcLoader(ReferenceLoader):
- label = "Deprecated loader (don't use)"
- families = ["pointcache", "animation"]
- representations = ["abc"]
- tool_names = []
+ output = next((node for node in self if
+ node.endswith("out_SET")), None)
+ controls = next((node for node in self if
+ node.endswith("controls_SET")), None)
+ assert output, "No out_SET in rig, this is a bug."
+ assert controls, "No controls_SET in rig, this is a bug."
-# for backwards compatibility
-class ModelLoader(ReferenceLoader):
- label = "Deprecated loader (don't use)"
- families = ["model", "pointcache"]
- representations = ["abc"]
- tool_names = []
+ # Find the roots amongst the loaded nodes
+ roots = cmds.ls(self[:], assemblies=True, long=True)
+ assert roots, "No root nodes in rig, this is a bug."
+
+ asset = api.Session["AVALON_ASSET"]
+ dependency = str(context["representation"]["_id"])
+
+ self.log.info("Creating subset: {}".format(namespace))
+
+ # Create the animation instance
+ with maya.maintained_selection():
+ cmds.select([output, controls] + roots, noExpand=True)
+ api.create(name=namespace,
+ asset=asset,
+ family="animation",
+ options={"useSelection": True},
+ data={"dependencies": dependency})
diff --git a/pype/plugins/maya/load/load_rig.py b/pype/plugins/maya/load/load_rig.py
deleted file mode 100644
index fc6e666ac6..0000000000
--- a/pype/plugins/maya/load/load_rig.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from maya import cmds
-
-import pype.maya.plugin
-from avalon import api, maya
-import os
-from pypeapp import config
-
-
-class RigLoader(pype.maya.plugin.ReferenceLoader):
- """Specific loader for rigs
-
- This automatically creates an instance for animators upon load.
-
- """
-
- families = ["rig"]
- representations = ["ma"]
-
- label = "Reference rig"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def process_reference(self, context, name, namespace, data):
-
- try:
- family = context["representation"]["context"]["family"]
- except ValueError:
- family = "rig"
-
- groupName = "{}:{}".format(namespace, name)
- nodes = cmds.file(self.fname,
- namespace=namespace,
- reference=True,
- returnNewNodes=True,
- groupReference=True,
- groupName=groupName)
-
- cmds.xform(groupName, pivots=(0, 0, 0))
-
- presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
- colors = presets['plugins']['maya']['load']['colors']
-
- c = colors.get(family)
- if c is not None:
- cmds.setAttr(groupName + ".useOutlinerColor", 1)
- cmds.setAttr(groupName + ".outlinerColor",
- c[0], c[1], c[2])
-
- shapes = cmds.ls(nodes, shapes=True, long=True)
- print(shapes)
-
- newNodes = (list(set(nodes) - set(shapes)))
- print(newNodes)
-
- # Store for post-process
- self[:] = newNodes
- if data.get("post_process", True):
- self._post_process(name, namespace, context, data)
-
- return newNodes
-
- def _post_process(self, name, namespace, context, data):
-
- # TODO(marcus): We are hardcoding the name "out_SET" here.
- # Better register this keyword, so that it can be used
- # elsewhere, such as in the Integrator plug-in,
- # without duplication.
-
- output = next((node for node in self if
- node.endswith("out_SET")), None)
- controls = next((node for node in self if
- node.endswith("controls_SET")), None)
-
- assert output, "No out_SET in rig, this is a bug."
- assert controls, "No controls_SET in rig, this is a bug."
-
- # Find the roots amongst the loaded nodes
- roots = cmds.ls(self[:], assemblies=True, long=True)
- assert roots, "No root nodes in rig, this is a bug."
-
- asset = api.Session["AVALON_ASSET"]
- dependency = str(context["representation"]["_id"])
-
- # Create the animation instance
- with maya.maintained_selection():
- cmds.select([output, controls] + roots, noExpand=True)
- api.create(name=namespace,
- asset=asset,
- family="animation",
- options={"useSelection": True},
- data={"dependencies": dependency})
-
- def switch(self, container, representation):
- self.update(container, representation)
diff --git a/pype/plugins/maya/load/load_vrayproxy.py b/pype/plugins/maya/load/load_vrayproxy.py
index 9b07dc7e30..35d93676a0 100644
--- a/pype/plugins/maya/load/load_vrayproxy.py
+++ b/pype/plugins/maya/load/load_vrayproxy.py
@@ -117,7 +117,7 @@ class VRayProxyLoader(api.Loader):
vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
- name="{}_VRMM".format(name))
+ name="{}_VRMM".format(name))
vray_mat_sg = cmds.sets(name="{}_VRSG".format(name),
empty=True,
renderable=True,
diff --git a/pype/plugins/maya/publish/extract_assembly.py b/pype/plugins/maya/publish/extract_assembly.py
index 26b16a73c4..c12d57e836 100644
--- a/pype/plugins/maya/publish/extract_assembly.py
+++ b/pype/plugins/maya/publish/extract_assembly.py
@@ -22,11 +22,11 @@ class ExtractAssembly(pype.api.Extractor):
def process(self, instance):
- parent_dir = self.staging_dir(instance)
+ staging_dir = self.staging_dir(instance)
hierarchy_filename = "{}.abc".format(instance.name)
- hierarchy_path = os.path.join(parent_dir, hierarchy_filename)
+ hierarchy_path = os.path.join(staging_dir, hierarchy_filename)
json_filename = "{}.json".format(instance.name)
- json_path = os.path.join(parent_dir, json_filename)
+ json_path = os.path.join(staging_dir, json_filename)
self.log.info("Dumping scene data for debugging ..")
with open(json_path, "w") as filepath:
@@ -46,8 +46,24 @@ class ExtractAssembly(pype.api.Extractor):
"uvWrite": True,
"selection": True})
- instance.data["files"] = [json_filename, hierarchy_filename]
+ if "representations" not in instance.data:
+ instance.data["representations"] = []
+ representation_abc = {
+ 'name': 'abc',
+ 'ext': 'abc',
+ 'files': hierarchy_filename,
+ "stagingDir": staging_dir
+ }
+ instance.data["representations"].append(representation_abc)
+
+ representation_json = {
+ 'name': 'json',
+ 'ext': 'json',
+ 'files': json_filename,
+ "stagingDir": staging_dir
+ }
+ instance.data["representations"].append(representation_json)
# Remove data
instance.data.pop("scenedata", None)
diff --git a/pype/plugins/maya/publish/extract_quicktime.py b/pype/plugins/maya/publish/extract_quicktime.py
index 1031955260..94b5a716a2 100644
--- a/pype/plugins/maya/publish/extract_quicktime.py
+++ b/pype/plugins/maya/publish/extract_quicktime.py
@@ -1,16 +1,14 @@
import os
+import glob
import contextlib
-import capture_gui
import clique
+import capture
#
import pype.maya.lib as lib
import pype.api
#
from maya import cmds, mel
import pymel.core as pm
-# import ffmpeg
-# # from pype.scripts import otio_burnin
-# reload(ffmpeg)
# TODO: move codec settings to presets
@@ -93,7 +91,18 @@ class ExtractQuicktime(pype.api.Extractor):
pm.currentTime(refreshFrameInt, edit=True)
with maintained_time():
- playblast = capture_gui.lib.capture_scene(preset)
+ filename = preset.get("filename", "%TEMP%")
+
+ # Force viewer to False in call to capture because we have our own
+ # viewer opening call to allow a signal to trigger between playblast
+ # and viewer
+ preset['viewer'] = False
+
+ # Remove panel key since it's internal value to capture_gui
+ preset.pop("panel", None)
+
+ path = capture.capture(**preset)
+ playblast = self._fix_playblast_output_path(path)
self.log.info("file list {}".format(playblast))
@@ -119,6 +128,46 @@ class ExtractQuicktime(pype.api.Extractor):
}
instance.data["representations"].append(representation)
+ def _fix_playblast_output_path(self, filepath):
+ """Workaround a bug in maya.cmds.playblast to return correct filepath.
+
+ When the `viewer` argument is set to False and maya.cmds.playblast
+ does not automatically open the playblasted file the returned
+ filepath does not have the file's extension added correctly.
+
+ To workaround this we just glob.glob() for any file extensions and
+ assume the latest modified file is the correct file and return it.
+
+ """
+ # Catch cancelled playblast
+ if filepath is None:
+ self.log.warning("Playblast did not result in output path. "
+ "Playblast is probably interrupted.")
+ return None
+
+ # Fix: playblast not returning correct filename (with extension)
+ # Lets assume the most recently modified file is the correct one.
+ if not os.path.exists(filepath):
+ directory = os.path.dirname(filepath)
+ filename = os.path.basename(filepath)
+ # check if the filepath is has frame based filename
+ # example : capture.####.png
+ parts = filename.split(".")
+ if len(parts) == 3:
+ query = os.path.join(directory, "{}.*.{}".format(parts[0],
+ parts[-1]))
+ files = glob.glob(query)
+ else:
+ files = glob.glob("{}.*".format(filepath))
+
+ if not files:
+ raise RuntimeError("Couldn't find playblast from: "
+ "{0}".format(filepath))
+ filepath = max(files, key=os.path.getmtime)
+
+ return filepath
+
+
@contextlib.contextmanager
def maintained_time():
diff --git a/pype/plugins/maya/publish/extract_thumbnail.py b/pype/plugins/maya/publish/extract_thumbnail.py
index dc8044cf19..8377af1ac0 100644
--- a/pype/plugins/maya/publish/extract_thumbnail.py
+++ b/pype/plugins/maya/publish/extract_thumbnail.py
@@ -1,31 +1,14 @@
import os
import contextlib
-import time
-import sys
+import glob
-import capture_gui
-import clique
+import capture
import pype.maya.lib as lib
import pype.api
from maya import cmds
import pymel.core as pm
-# import ffmpeg
-# reload(ffmpeg)
-
-import avalon.maya
-
-# import maya_utils as mu
-
-# from tweakHUD import master
-# from tweakHUD import draft_hud as dHUD
-# from tweakHUD import ftrackStrings as fStrings
-
-#
-# def soundOffsetFunc(oSF, SF, H):
-# tmOff = (oSF - H) - SF
-# return tmOff
class ExtractThumbnail(pype.api.Extractor):
@@ -47,39 +30,8 @@ class ExtractThumbnail(pype.api.Extractor):
end = cmds.currentTime(query=True)
self.log.info("start: {}, end: {}".format(start, end))
- members = instance.data['setMembers']
camera = instance.data['review_camera']
- # project_code = ftrack_data['Project']['code']
- # task_type = ftrack_data['Task']['type']
- #
- # # load Preset
- # studio_repos = os.path.abspath(os.environ.get('studio_repos'))
- # shot_preset_path = os.path.join(studio_repos, 'maya',
- # 'capture_gui_presets',
- # (project_code + '_' + task_type + '_' + asset + '.json'))
- #
- # task_preset_path = os.path.join(studio_repos, 'maya',
- # 'capture_gui_presets',
- # (project_code + '_' + task_type + '.json'))
- #
- # project_preset_path = os.path.join(studio_repos, 'maya',
- # 'capture_gui_presets',
- # (project_code + '.json'))
- #
- # default_preset_path = os.path.join(studio_repos, 'maya',
- # 'capture_gui_presets',
- # 'default.json')
- #
- # if os.path.isfile(shot_preset_path):
- # preset_to_use = shot_preset_path
- # elif os.path.isfile(task_preset_path):
- # preset_to_use = task_preset_path
- # elif os.path.isfile(project_preset_path):
- # preset_to_use = project_preset_path
- # else:
- # preset_to_use = default_preset_path
-
capture_preset = ""
capture_preset = instance.context.data['presets']['maya']['capture']
try:
@@ -126,7 +78,18 @@ class ExtractThumbnail(pype.api.Extractor):
pm.currentTime(refreshFrameInt, edit=True)
with maintained_time():
- playblast = capture_gui.lib.capture_scene(preset)
+ filename = preset.get("filename", "%TEMP%")
+
+ # Force viewer to False in call to capture because we have our own
+ # viewer opening call to allow a signal to trigger between
+ # playblast and viewer
+ preset['viewer'] = False
+
+ # Remove panel key since it's internal value to capture_gui
+ preset.pop("panel", None)
+
+ path = capture.capture(**preset)
+ playblast = self._fix_playblast_output_path(path)
_, thumbnail = os.path.split(playblast)
@@ -144,6 +107,45 @@ class ExtractThumbnail(pype.api.Extractor):
}
instance.data["representations"].append(representation)
+ def _fix_playblast_output_path(self, filepath):
+ """Workaround a bug in maya.cmds.playblast to return correct filepath.
+
+ When the `viewer` argument is set to False and maya.cmds.playblast
+ does not automatically open the playblasted file the returned
+ filepath does not have the file's extension added correctly.
+
+ To workaround this we just glob.glob() for any file extensions and
+ assume the latest modified file is the correct file and return it.
+
+ """
+ # Catch cancelled playblast
+ if filepath is None:
+ self.log.warning("Playblast did not result in output path. "
+ "Playblast is probably interrupted.")
+ return None
+
+ # Fix: playblast not returning correct filename (with extension)
+ # Lets assume the most recently modified file is the correct one.
+ if not os.path.exists(filepath):
+ directory = os.path.dirname(filepath)
+ filename = os.path.basename(filepath)
+ # check if the filepath is has frame based filename
+ # example : capture.####.png
+ parts = filename.split(".")
+ if len(parts) == 3:
+ query = os.path.join(directory, "{}.*.{}".format(parts[0],
+ parts[-1]))
+ files = glob.glob(query)
+ else:
+ files = glob.glob("{}.*".format(filepath))
+
+ if not files:
+ raise RuntimeError("Couldn't find playblast from: "
+ "{0}".format(filepath))
+ filepath = max(files, key=os.path.getmtime)
+
+ return filepath
+
@contextlib.contextmanager
def maintained_time():
diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py
index 55c04e9c41..e3fa79b1c8 100644
--- a/pype/plugins/maya/publish/submit_maya_deadline.py
+++ b/pype/plugins/maya/publish/submit_maya_deadline.py
@@ -228,80 +228,19 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"AuxFiles": []
}
- # Include critical environment variables with submission
+ # We need those to pass them to pype for it to set correct context
keys = [
- # This will trigger `userSetup.py` on the slave
- # such that proper initialisation happens the same
- # way as it does on a local machine.
- # TODO(marcus): This won't work if the slaves don't
- # have accesss to these paths, such as if slaves are
- # running Linux and the submitter is on Windows.
- "PYTHONPATH",
- "PATH",
-
- "MTOA_EXTENSIONS_PATH",
- "MTOA_EXTENSIONS",
- "DYLD_LIBRARY_PATH",
- "MAYA_RENDER_DESC_PATH",
- "MAYA_MODULE_PATH",
- "ARNOLD_PLUGIN_PATH",
- "AVALON_SCHEMA",
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
- "PYBLISHPLUGINPATH",
-
- # todo: This is a temporary fix for yeti variables
- "PEREGRINEL_LICENSE",
- "SOLIDANGLE_LICENSE",
- "ARNOLD_LICENSE"
- "MAYA_MODULE_PATH",
- "TOOL_ENV"
+ "AVALON_PROJECT",
+ "AVALON_ASSET",
+ "AVALON_TASK",
+ "PYPE_USERNAME"
]
+
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
- # self.log.debug("enviro: {}".format(pprint(environment)))
- for path in os.environ:
- if path.lower().startswith('pype_'):
- environment[path] = os.environ[path]
-
- environment["PATH"] = os.environ["PATH"]
- # self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
- clean_environment = {}
- for key in environment:
- clean_path = ""
- self.log.debug("key: {}".format(key))
- self.log.debug("value: {}".format(environment[key]))
- to_process = str(environment[key])
- if key == "PYPE_STUDIO_CORE_MOUNT":
- clean_path = to_process
- elif "://" in to_process:
- clean_path = to_process
- elif os.pathsep not in str(to_process):
- try:
- path = to_process
- path.decode('UTF-8', 'strict')
- clean_path = os.path.normpath(path)
- except UnicodeDecodeError:
- print('path contains non UTF characters')
- else:
- for path in to_process.split(os.pathsep):
- try:
- path.decode('UTF-8', 'strict')
- clean_path += os.path.normpath(path) + os.pathsep
- except UnicodeDecodeError:
- print('path contains non UTF characters')
-
- if key == "PYTHONPATH":
- clean_path = clean_path.replace('python2', 'python3')
- clean_path = clean_path.replace(
- os.path.normpath(
- environment['PYPE_STUDIO_CORE_MOUNT']), # noqa
- os.path.normpath(
- environment['PYPE_STUDIO_CORE_PATH'])) # noqa
- clean_environment[key] = clean_path
-
- environment = clean_environment
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
@@ -319,7 +258,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
self.preflight_check(instance)
- self.log.info("Submitting..")
+ self.log.info("Submitting ...")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
diff --git a/pype/plugins/nuke/create/create_backdrop.py b/pype/plugins/nuke/create/create_backdrop.py
index 767e92b592..2016c66095 100644
--- a/pype/plugins/nuke/create/create_backdrop.py
+++ b/pype/plugins/nuke/create/create_backdrop.py
@@ -35,8 +35,10 @@ class CreateBackdrop(Creator):
return instance
else:
- nuke.message("Please select nodes you "
- "wish to add to a container")
+            msg = ("Please select nodes you "
+                   "wish to add to a container")
+ self.log.error(msg)
+ nuke.message(msg)
return
else:
bckd_node = autoBackdrop()
diff --git a/pype/plugins/nuke/create/create_gizmo.py b/pype/plugins/nuke/create/create_gizmo.py
index 41229862e3..ca199b8800 100644
--- a/pype/plugins/nuke/create/create_gizmo.py
+++ b/pype/plugins/nuke/create/create_gizmo.py
@@ -36,8 +36,10 @@ class CreateGizmo(Creator):
node["tile_color"].setValue(int(self.node_color, 16))
return anlib.imprint(node, self.data)
else:
- nuke.message("Please select a group node "
- "you wish to publish as the gizmo")
+ msg = ("Please select a group node "
+ "you wish to publish as the gizmo")
+ self.log.error(msg)
+ nuke.message(msg)
if len(nodes) >= 2:
anlib.select_nodes(nodes)
@@ -58,8 +60,10 @@ class CreateGizmo(Creator):
return anlib.imprint(gizmo_node, self.data)
else:
- nuke.message("Please select nodes you "
- "wish to add to the gizmo")
+ msg = ("Please select nodes you "
+ "wish to add to the gizmo")
+ self.log.error(msg)
+ nuke.message(msg)
return
else:
with anlib.maintained_selection():
diff --git a/pype/plugins/nuke/create/create_read.py b/pype/plugins/nuke/create/create_read.py
index 1aa7e68746..70db580a7e 100644
--- a/pype/plugins/nuke/create/create_read.py
+++ b/pype/plugins/nuke/create/create_read.py
@@ -34,7 +34,9 @@ class CrateRead(avalon.nuke.Creator):
nodes = self.nodes
if not nodes or len(nodes) == 0:
- nuke.message('Please select Read node')
+ msg = "Please select Read node"
+ self.log.error(msg)
+ nuke.message(msg)
else:
count_reads = 0
for node in nodes:
@@ -46,7 +48,9 @@ class CrateRead(avalon.nuke.Creator):
count_reads += 1
if count_reads < 1:
- nuke.message('Please select Read node')
+ msg = "Please select Read node"
+ self.log.error(msg)
+ nuke.message(msg)
return
def change_read_node(self, name, node, data):
diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py
index a85408cab3..74e450f267 100644
--- a/pype/plugins/nuke/create/create_write.py
+++ b/pype/plugins/nuke/create/create_write.py
@@ -41,9 +41,11 @@ class CreateWriteRender(plugin.PypeCreator):
if (self.options or {}).get("useSelection"):
nodes = self.nodes
- assert len(nodes) < 2, self.log.error(
- "Select only one node. The node you want to connect to, "
- "or tick off `Use selection`")
+ if not (len(nodes) < 2):
+ msg = ("Select only one node. The node you want to connect to, "
+ "or tick off `Use selection`")
+            self.log.error(msg)
+ nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
@@ -134,7 +136,11 @@ class CreateWritePrerender(plugin.PypeCreator):
if (self.options or {}).get("useSelection"):
nodes = self.nodes
- assert len(nodes) < 2, self.log.error("Select only one node. The node you want to connect to, or tick off `Use selection`")
+ if not (len(nodes) < 2):
+ msg = ("Select only one node. The node you want to connect to, "
+ "or tick off `Use selection`")
+ self.log.error(msg)
+ nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
diff --git a/pype/plugins/nuke/load/load_backdrop.py b/pype/plugins/nuke/load/load_backdrop.py
index 7f58d4e9ec..07a6724771 100644
--- a/pype/plugins/nuke/load/load_backdrop.py
+++ b/pype/plugins/nuke/load/load_backdrop.py
@@ -256,8 +256,10 @@ class LoadBackdropNodes(api.Loader):
         if len(viewer) > 0:
             viewer = viewer[0]
         else:
-            self.log.error("Please create Viewer node before you "
-                           "run this action again")
+            msg = ("Please create Viewer node before you "
+                   "run this action again")
+            self.log.error(msg)
+            nuke.message(msg)
return None
# get coordinates of Viewer1
diff --git a/pype/plugins/nuke/load/load_gizmo_ip.py b/pype/plugins/nuke/load/load_gizmo_ip.py
index 0d78c14214..23d7ef2f4a 100644
--- a/pype/plugins/nuke/load/load_gizmo_ip.py
+++ b/pype/plugins/nuke/load/load_gizmo_ip.py
@@ -176,8 +176,10 @@ class LoadGizmoInputProcess(api.Loader):
if len(viewer) > 0:
viewer = viewer[0]
else:
- self.log.error("Please create Viewer node before you "
- "run this action again")
+            msg = ("Please create Viewer node before you "
+                   "run this action again")
+ self.log.error(msg)
+ nuke.message(msg)
return None
# get coordinates of Viewer1
diff --git a/pype/plugins/nuke/load/load_luts_ip.py b/pype/plugins/nuke/load/load_luts_ip.py
index 5f09adb05f..2b38a9ff08 100644
--- a/pype/plugins/nuke/load/load_luts_ip.py
+++ b/pype/plugins/nuke/load/load_luts_ip.py
@@ -276,7 +276,10 @@ class LoadLutsInputProcess(api.Loader):
if len(viewer) > 0:
viewer = viewer[0]
else:
- self.log.error("Please create Viewer node before you run this action again")
+            msg = ("Please create Viewer node before you "
+                   "run this action again")
+ self.log.error(msg)
+ nuke.message(msg)
return None
# get coordinates of Viewer1
diff --git a/pype/plugins/nuke/load/load_matchmove.py b/pype/plugins/nuke/load/load_matchmove.py
index 6a674368fb..60d5dc026f 100644
--- a/pype/plugins/nuke/load/load_matchmove.py
+++ b/pype/plugins/nuke/load/load_matchmove.py
@@ -1,4 +1,5 @@
from avalon import api
+import nuke
class MatchmoveLoader(api.Loader):
@@ -19,6 +20,8 @@ class MatchmoveLoader(api.Loader):
exec(open(self.fname).read())
else:
- self.log.error("Unsupported script type")
+ msg = "Unsupported script type"
+ self.log.error(msg)
+ nuke.message(msg)
return True
diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py
index 8f01d4511b..76599c3351 100644
--- a/pype/plugins/nuke/load/load_sequence.py
+++ b/pype/plugins/nuke/load/load_sequence.py
@@ -73,7 +73,7 @@ class LoadSequence(api.Loader):
"""Load image sequence into Nuke"""
families = ["write", "source", "plate", "render"]
- representations = ["exr", "dpx", "jpg", "jpeg"]
+ representations = ["exr", "dpx", "jpg", "jpeg", "png"]
label = "Load sequence"
order = -10
diff --git a/pype/plugins/nuke/publish/collect_workfile.py b/pype/plugins/nuke/publish/collect_workfile.py
index 4fff9f46ed..9c01a3ec97 100644
--- a/pype/plugins/nuke/publish/collect_workfile.py
+++ b/pype/plugins/nuke/publish/collect_workfile.py
@@ -72,8 +72,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
"publish": root.knob('publish').value(),
"family": family,
"families": [family],
- "representations": list(),
- "subsetGroup": "workfiles"
+ "representations": list()
})
# adding basic script data
diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py
index 37c86978b6..3eff527d47 100644
--- a/pype/plugins/nuke/publish/collect_writes.py
+++ b/pype/plugins/nuke/publish/collect_writes.py
@@ -127,8 +127,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"families": families,
"colorspace": node["colorspace"].value(),
"deadlineChunkSize": deadlineChunkSize,
- "deadlinePriority": deadlinePriority,
- "subsetGroup": "renders"
+ "deadlinePriority": deadlinePriority
})
self.log.debug("instance.data: {}".format(instance.data))
diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py
index 4373309363..90b1fda1ec 100644
--- a/pype/plugins/nuke/publish/extract_review_data_lut.py
+++ b/pype/plugins/nuke/publish/extract_review_data_lut.py
@@ -41,7 +41,7 @@ class ExtractReviewDataLut(pype.api.Extractor):
with anlib.maintained_selection():
exporter = pnlib.ExporterReviewLut(
self, instance
- )
+ )
data = exporter.generate_lut()
# assign to representations
diff --git a/pype/plugins/nuke/publish/validate_output_resolution.py b/pype/plugins/nuke/publish/validate_output_resolution.py
new file mode 100644
index 0000000000..2563ee929f
--- /dev/null
+++ b/pype/plugins/nuke/publish/validate_output_resolution.py
@@ -0,0 +1,78 @@
+import nuke
+
+import pyblish.api
+
+
+class RepairWriteResolutionDifference(pyblish.api.Action):
+
+ label = "Repair"
+ icon = "wrench"
+ on = "failed"
+
+ def process(self, context, plugin):
+
+ # Get the errored instances
+ failed = []
+ for result in context.data["results"]:
+ if (result["error"] is not None and result["instance"] is not None
+ and result["instance"] not in failed):
+ failed.append(result["instance"])
+
+ # Apply pyblish.logic to get the instances for the plug-in
+ instances = pyblish.api.instances_by_plugin(failed, plugin)
+
+ for instance in instances:
+ reformat = instance[0].dependencies()[0]
+ if reformat.Class() != "Reformat":
+ reformat = nuke.nodes.Reformat(inputs=[instance[0].input(0)])
+
+ xpos = instance[0].xpos()
+ ypos = instance[0].ypos() - 26
+
+ dependent_ypos = instance[0].dependencies()[0].ypos()
+ if (instance[0].ypos() - dependent_ypos) <= 51:
+ xpos += 110
+
+ reformat.setXYpos(xpos, ypos)
+
+ instance[0].setInput(0, reformat)
+
+ reformat["resize"].setValue("none")
+
+
+class ValidateOutputResolution(pyblish.api.InstancePlugin):
+ """Validates Output Resolution.
+
+ It is making sure the resolution of write's input is the same as
+ Format definition of script in Root node.
+ """
+
+ order = pyblish.api.ValidatorOrder
+ optional = True
+ families = ["render", "render.local", "render.farm"]
+ label = "Write Resolution"
+ hosts = ["nuke"]
+ actions = [RepairWriteResolutionDifference]
+
+ def process(self, instance):
+
+        # Skip the resolution check if a crop node exists.
+        if instance[0].dependencies()[0].Class() == "Crop":
+            return
+
+        msg = "Write node resolution does not match the root format."
+        assert self.check_resolution(instance), msg
+
+ def check_resolution(self, instance):
+ node = instance[0]
+
+ root_width = instance.data["resolutionWidth"]
+ root_height = instance.data["resolutionHeight"]
+
+ write_width = node.format().width()
+ write_height = node.format().height()
+
+ if (root_width != write_width) or (root_height != write_height):
+ return None
+ else:
+ return True
diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py
index c63c289947..169ea1ecb5 100644
--- a/pype/plugins/nuke/publish/validate_rendered_frames.py
+++ b/pype/plugins/nuke/publish/validate_rendered_frames.py
@@ -41,7 +41,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
if not repre.get('files'):
msg = ("no frames were collected, "
"you need to render them")
- self.log.warning(msg)
+ self.log.error(msg)
raise ValidationException(msg)
collections, remainder = clique.assemble(repre["files"])
@@ -75,7 +75,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
self.log.info(
'len(collection.indexes): {}'.format(collected_frames_len)
)
-
+
if "slate" in instance.data["families"]:
collected_frames_len -= 1
diff --git a/pype/plugins/nuke/publish/validate_write_bounding_box.py b/pype/plugins/nuke/publish/validate_write_bounding_box.py
index 417d4ab004..e4b7c77a25 100644
--- a/pype/plugins/nuke/publish/validate_write_bounding_box.py
+++ b/pype/plugins/nuke/publish/validate_write_bounding_box.py
@@ -57,7 +57,7 @@ class ValidateNukeWriteBoundingBox(pyblish.api.InstancePlugin):
order = pyblish.api.ValidatorOrder
optional = True
- families = ["render"]
+ families = ["render", "render.local", "render.farm"]
label = "Write Bounding Box"
hosts = ["nuke"]
actions = [RepairNukeBoundingBoxAction]
diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py
index 0729f20957..3759d50f6a 100644
--- a/pype/plugins/nukestudio/publish/collect_clips.py
+++ b/pype/plugins/nukestudio/publish/collect_clips.py
@@ -106,8 +106,8 @@ class CollectClips(api.ContextPlugin):
"family": "clip",
"families": [],
"handles": 0,
- "handleStart": projectdata.get("handles", 0),
- "handleEnd": projectdata.get("handles", 0),
+ "handleStart": projectdata.get("handleStart", 0),
+ "handleEnd": projectdata.get("handleEnd", 0),
"version": int(version)})
instance = context.create_instance(**data)
diff --git a/pype/plugins/standalonepublisher/publish/collect_matchmove.py b/pype/plugins/standalonepublisher/publish/collect_matchmove.py
index b46efc1cf3..5d9e8ddfb4 100644
--- a/pype/plugins/standalonepublisher/publish/collect_matchmove.py
+++ b/pype/plugins/standalonepublisher/publish/collect_matchmove.py
@@ -21,7 +21,7 @@ class CollectMatchmovePublish(pyblish.api.InstancePlugin):
label = "Collect Matchmove - SA Publish"
order = pyblish.api.CollectorOrder
- family = ["matchmove"]
+ families = ["matchmove"]
hosts = ["standalonepublisher"]
def process(self, instance):
diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py
index 5517cfeb4c..620ee3d851 100644
--- a/pype/scripts/publish_filesequence.py
+++ b/pype/scripts/publish_filesequence.py
@@ -1,9 +1,12 @@
"""This module is used for command line publishing of image sequences."""
import os
+import sys
+import argparse
import logging
import subprocess
import platform
+
try:
from shutil import which
except ImportError:
@@ -23,7 +26,6 @@ error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
def __main__():
- import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--paths",
nargs="*",
@@ -43,7 +45,11 @@ def __main__():
print("Running pype ...")
auto_pype_root = os.path.dirname(os.path.abspath(__file__))
auto_pype_root = os.path.abspath(auto_pype_root + "../../../../..")
+
auto_pype_root = os.environ.get('PYPE_ROOT') or auto_pype_root
+ if os.environ.get('PYPE_ROOT'):
+ print("Got Pype location from environment: {}".format(
+ os.environ.get('PYPE_ROOT')))
pype_command = "pype.ps1"
if platform.system().lower() == "linux":
@@ -69,7 +75,7 @@ def __main__():
print("Set pype root to: {}".format(pype_root))
print("Paths: {}".format(kwargs.paths or [os.getcwd()]))
- paths = kwargs.paths or [os.getcwd()]
+    paths = kwargs.paths or [os.environ.get("PYPE_METADATA_FILE") or os.getcwd()]  # noqa
args = [
os.path.join(pype_root, pype_command),
diff --git a/pype/vendor/ftrack_api_old/_version.py b/pype/vendor/ftrack_api_old/_version.py
index 07f744ca5d..aa1a8c4aba 100644
--- a/pype/vendor/ftrack_api_old/_version.py
+++ b/pype/vendor/ftrack_api_old/_version.py
@@ -1 +1 @@
-__version__ = '1.3.3'
+__version__ = '1.8.2'
diff --git a/pype/vendor/ftrack_api_old/_weakref.py b/pype/vendor/ftrack_api_old/_weakref.py
new file mode 100644
index 0000000000..69cc6f4b4f
--- /dev/null
+++ b/pype/vendor/ftrack_api_old/_weakref.py
@@ -0,0 +1,66 @@
+"""
+Yet another backport of WeakMethod for Python 2.7.
+Changes include removing exception chaining and adding args to super() calls.
+
+Copyright (c) 2001-2019 Python Software Foundation. All rights reserved.
+
+Full license available in LICENSE.python.
+"""
+from weakref import ref
+
+
+class WeakMethod(ref):
+ """
+ A custom `weakref.ref` subclass which simulates a weak reference to
+ a bound method, working around the lifetime problem of bound methods.
+ """
+
+ __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__"
+
+ def __new__(cls, meth, callback=None):
+ try:
+ obj = meth.__self__
+ func = meth.__func__
+ except AttributeError:
+ raise TypeError(
+ "argument should be a bound method, not {}".format(type(meth))
+ )
+
+ def _cb(arg):
+ # The self-weakref trick is needed to avoid creating a reference
+ # cycle.
+ self = self_wr()
+ if self._alive:
+ self._alive = False
+ if callback is not None:
+ callback(self)
+
+ self = ref.__new__(cls, obj, _cb)
+ self._func_ref = ref(func, _cb)
+ self._meth_type = type(meth)
+ self._alive = True
+ self_wr = ref(self)
+ return self
+
+ def __call__(self):
+ obj = super(WeakMethod, self).__call__()
+ func = self._func_ref()
+ if obj is None or func is None:
+ return None
+ return self._meth_type(func, obj)
+
+ def __eq__(self, other):
+ if isinstance(other, WeakMethod):
+ if not self._alive or not other._alive:
+ return self is other
+ return ref.__eq__(self, other) and self._func_ref == other._func_ref
+ return NotImplemented
+
+ def __ne__(self, other):
+ if isinstance(other, WeakMethod):
+ if not self._alive or not other._alive:
+ return self is not other
+ return ref.__ne__(self, other) or self._func_ref != other._func_ref
+ return NotImplemented
+
+ __hash__ = ref.__hash__
diff --git a/pype/vendor/ftrack_api_old/attribute.py b/pype/vendor/ftrack_api_old/attribute.py
index 66840bed66..47fd6c9616 100644
--- a/pype/vendor/ftrack_api_old/attribute.py
+++ b/pype/vendor/ftrack_api_old/attribute.py
@@ -148,7 +148,8 @@ class Attribute(object):
'''A name and value pair persisted remotely.'''
def __init__(
- self, name, default_value=ftrack_api_old.symbol.NOT_SET, mutable=True
+ self, name, default_value=ftrack_api_old.symbol.NOT_SET, mutable=True,
+ computed=False
):
'''Initialise attribute with *name*.
@@ -161,10 +162,14 @@ class Attribute(object):
are :attr:`ftrack_api_old.symbol.NOT_SET`. The exception to this is when the
target value is also :attr:`ftrack_api_old.symbol.NOT_SET`.
+ If *computed* is set to True the value is a remote side computed value
+ and should not be long-term cached.
+
'''
super(Attribute, self).__init__()
self._name = name
self._mutable = mutable
+ self._computed = computed
self.default_value = default_value
self._local_key = 'local'
@@ -205,6 +210,11 @@ class Attribute(object):
'''Return whether attribute is mutable.'''
return self._mutable
+ @property
+ def computed(self):
+ '''Return whether attribute is computed.'''
+ return self._computed
+
def get_value(self, entity):
'''Return current value for *entity*.
diff --git a/pype/vendor/ftrack_api_old/entity/factory.py b/pype/vendor/ftrack_api_old/entity/factory.py
index 16721514bd..f47c92e563 100644
--- a/pype/vendor/ftrack_api_old/entity/factory.py
+++ b/pype/vendor/ftrack_api_old/entity/factory.py
@@ -49,9 +49,11 @@ class Factory(object):
# Build attributes for class.
attributes = ftrack_api_old.attribute.Attributes()
- immutable = schema.get('immutable', [])
+ immutable_properties = schema.get('immutable', [])
+ computed_properties = schema.get('computed', [])
for name, fragment in schema.get('properties', {}).items():
- mutable = name not in immutable
+ mutable = name not in immutable_properties
+ computed = name in computed_properties
default = fragment.get('default', ftrack_api_old.symbol.NOT_SET)
if default == '{uid}':
@@ -62,7 +64,8 @@ class Factory(object):
if data_type is not ftrack_api_old.symbol.NOT_SET:
if data_type in (
- 'string', 'boolean', 'integer', 'number', 'variable'
+ 'string', 'boolean', 'integer', 'number', 'variable',
+ 'object'
):
# Basic scalar attribute.
if data_type == 'number':
@@ -74,7 +77,7 @@ class Factory(object):
data_type = 'datetime'
attribute = self.create_scalar_attribute(
- class_name, name, mutable, default, data_type
+ class_name, name, mutable, computed, default, data_type
)
if attribute:
attributes.add(attribute)
@@ -139,11 +142,12 @@ class Factory(object):
return cls
def create_scalar_attribute(
- self, class_name, name, mutable, default, data_type
+ self, class_name, name, mutable, computed, default, data_type
):
'''Return appropriate scalar attribute instance.'''
return ftrack_api_old.attribute.ScalarAttribute(
- name, data_type=data_type, default_value=default, mutable=mutable
+ name, data_type=data_type, default_value=default, mutable=mutable,
+ computed=computed
)
def create_reference_attribute(self, class_name, name, mutable, reference):
diff --git a/pype/vendor/ftrack_api_old/entity/location.py b/pype/vendor/ftrack_api_old/entity/location.py
index d48264abc2..8d9d52c654 100644
--- a/pype/vendor/ftrack_api_old/entity/location.py
+++ b/pype/vendor/ftrack_api_old/entity/location.py
@@ -526,7 +526,8 @@ class Location(ftrack_api_old.entity.base.Entity):
for index, resource_identifier in enumerate(resource_identifiers):
resource_identifiers[index] = (
self.resource_identifier_transformer.decode(
- resource_identifier
+ resource_identifier,
+ context={'component': components[index]}
)
)
diff --git a/pype/vendor/ftrack_api_old/entity/note.py b/pype/vendor/ftrack_api_old/entity/note.py
index 4cacf6ac8a..c628886fd9 100644
--- a/pype/vendor/ftrack_api_old/entity/note.py
+++ b/pype/vendor/ftrack_api_old/entity/note.py
@@ -1,6 +1,8 @@
# :coding: utf-8
# :copyright: Copyright (c) 2015 ftrack
+import warnings
+
import ftrack_api_old.entity.base
@@ -33,26 +35,52 @@ class Note(ftrack_api_old.entity.base.Entity):
class CreateNoteMixin(object):
'''Mixin to add create_note method on entity class.'''
- def create_note(self, content, author, recipients=None, category=None):
+ def create_note(
+ self, content, author, recipients=None, category=None, labels=None
+ ):
'''Create note with *content*, *author*.
- Note category can be set by including *category* and *recipients*
- can be specified as a list of user or group instances.
+ NoteLabels can be set by including *labels*.
+
+ Note category can be set by including *category*.
+
+ *recipients* can be specified as a list of user or group instances.
'''
+ note_label_support = 'NoteLabel' in self.session.types
+
+ if not labels:
+ labels = []
+
+ if labels and not note_label_support:
+ raise ValueError(
+ 'NoteLabel is not supported by the current server version.'
+ )
+
+ if category and labels:
+ raise ValueError(
+ 'Both category and labels cannot be set at the same time.'
+ )
+
if not recipients:
recipients = []
- category_id = None
- if category:
- category_id = category['id']
-
data = {
'content': content,
- 'author': author,
- 'category_id': category_id
+ 'author': author
}
+ if category:
+ if note_label_support:
+ labels = [category]
+ warnings.warn(
+ 'category argument will be removed in an upcoming version, '
+ 'please use labels instead.',
+ PendingDeprecationWarning
+ )
+ else:
+ data['category_id'] = category['id']
+
note = self.session.create('Note', data)
self['notes'].append(note)
@@ -65,4 +93,13 @@ class CreateNoteMixin(object):
note['recipients'].append(recipient)
+ for label in labels:
+ self.session.create(
+ 'NoteLabelLink',
+ {
+ 'label_id': label['id'],
+ 'note_id': note['id']
+ }
+ )
+
return note
diff --git a/pype/vendor/ftrack_api_old/event/expression.py b/pype/vendor/ftrack_api_old/event/expression.py
index e10cd85844..8de4be0d71 100644
--- a/pype/vendor/ftrack_api_old/event/expression.py
+++ b/pype/vendor/ftrack_api_old/event/expression.py
@@ -3,14 +3,15 @@
from operator import eq, ne, ge, le, gt, lt
-from pyparsing import (ParserElement, Group, Word, CaselessKeyword, Forward,
+from pyparsing import (Group, Word, CaselessKeyword, Forward,
FollowedBy, Suppress, oneOf, OneOrMore, Optional,
alphanums, quotedString, removeQuotes)
import ftrack_api_old.exception
-# Optimise parsing using packrat memoisation feature.
-ParserElement.enablePackrat()
+# Do not enable packrat since it is not thread-safe and will result in parsing
+# exceptions in a multi threaded environment.
+# ParserElement.enablePackrat()
class Parser(object):
diff --git a/pype/vendor/ftrack_api_old/event/hub.py b/pype/vendor/ftrack_api_old/event/hub.py
index 25410aa1e1..3ffbd38056 100644
--- a/pype/vendor/ftrack_api_old/event/hub.py
+++ b/pype/vendor/ftrack_api_old/event/hub.py
@@ -14,6 +14,7 @@ import operator
import functools
import json
import socket
+import warnings
import requests
import requests.exceptions
@@ -40,9 +41,20 @@ ServerDetails = collections.namedtuple('ServerDetails', [
])
+
+
class EventHub(object):
'''Manage routing of events.'''
+ _future_signature_warning = (
+ 'When constructing your Session object you did not explicitly define '
+ 'auto_connect_event_hub as True even though you appear to be publishing '
+        'and / or subscribing to asynchronous events. In version 2.0 of '
+ 'the ftrack-python-api the default behavior will change from True '
+ 'to False. Please make sure to update your tools. You can read more at '
+ 'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html'
+ )
+
def __init__(self, server_url, api_user, api_key):
'''Initialise hub, connecting to ftrack *server_url*.
@@ -76,6 +88,8 @@ class EventHub(object):
self._auto_reconnect_attempts = 30
self._auto_reconnect_delay = 10
+ self._deprecation_warning_auto_connect = False
+
# Mapping of Socket.IO codes to meaning.
self._code_name_mapping = {
'0': 'disconnect',
@@ -134,6 +148,9 @@ class EventHub(object):
connected or connection fails.
'''
+
+ self._deprecation_warning_auto_connect = False
+
if self.connected:
raise ftrack_api_old.exception.EventHubConnectionError(
'Already connected.'
@@ -164,17 +181,26 @@ class EventHub(object):
# https://docs.python.org/2/library/socket.html#socket.socket.setblocking
self._connection = websocket.create_connection(url, timeout=60)
- except Exception:
+ except Exception as error:
+ error_message = (
+ 'Failed to connect to event server at {server_url} with '
+ 'error: "{error}".'
+ )
+
+ error_details = {
+ 'error': unicode(error),
+ 'server_url': self.get_server_url()
+ }
+
self.logger.debug(
L(
- 'Error connecting to event server at {0}.',
- self.get_server_url()
+ error_message, **error_details
),
exc_info=1
)
raise ftrack_api_old.exception.EventHubConnectionError(
- 'Failed to connect to event server at {0}.'
- .format(self.get_server_url())
+ error_message,
+ details=error_details
)
# Start background processing thread.
@@ -543,6 +569,11 @@ class EventHub(object):
event will be caught by this method and ignored.
'''
+ if self._deprecation_warning_auto_connect and not synchronous:
+ warnings.warn(
+ self._future_signature_warning, FutureWarning
+ )
+
try:
return self._publish(
event, synchronous=synchronous, on_reply=on_reply
@@ -700,18 +731,23 @@ class EventHub(object):
# Automatically publish a non None response as a reply when not in
# synchronous mode.
- if not synchronous and response is not None:
-
- try:
- self.publish_reply(
- event, data=response, source=subscriber.metadata
+ if not synchronous:
+ if self._deprecation_warning_auto_connect:
+ warnings.warn(
+ self._future_signature_warning, FutureWarning
)
- except Exception:
- self.logger.exception(L(
- 'Error publishing response {0} from subscriber {1} '
- 'for event {2}.', response, subscriber, event
- ))
+ if response is not None:
+ try:
+ self.publish_reply(
+ event, data=response, source=subscriber.metadata
+ )
+
+ except Exception:
+ self.logger.exception(L(
+ 'Error publishing response {0} from subscriber {1} '
+ 'for event {2}.', response, subscriber, event
+ ))
# Check whether to continue processing topic event.
if event.is_stopped():
@@ -881,6 +917,7 @@ class EventHub(object):
if code_name == 'connect':
self.logger.debug('Connected to event server.')
event = ftrack_api_old.event.base.Event('ftrack.meta.connected')
+ self._prepare_event(event)
self._event_queue.put(event)
elif code_name == 'disconnect':
@@ -901,6 +938,7 @@ class EventHub(object):
if not self.connected:
event = ftrack_api_old.event.base.Event('ftrack.meta.disconnected')
+ self._prepare_event(event)
self._event_queue.put(event)
elif code_name == 'heartbeat':
diff --git a/pype/vendor/ftrack_api_old/logging.py b/pype/vendor/ftrack_api_old/logging.py
index 2b28ce900b..41969c5b2a 100644
--- a/pype/vendor/ftrack_api_old/logging.py
+++ b/pype/vendor/ftrack_api_old/logging.py
@@ -1,6 +1,23 @@
# :coding: utf-8
# :copyright: Copyright (c) 2016 ftrack
+import functools
+import warnings
+
+
+def deprecation_warning(message):
+ def decorator(function):
+ @functools.wraps(function)
+ def wrapper(*args, **kwargs):
+ warnings.warn(
+ message,
+ PendingDeprecationWarning
+ )
+ return function(*args, **kwargs)
+ return wrapper
+
+ return decorator
+
class LazyLogMessage(object):
'''A log message that can be evaluated lazily for improved performance.
diff --git a/pype/vendor/ftrack_api_old/session.py b/pype/vendor/ftrack_api_old/session.py
index c313203a0c..0986962ca4 100644
--- a/pype/vendor/ftrack_api_old/session.py
+++ b/pype/vendor/ftrack_api_old/session.py
@@ -16,6 +16,7 @@ import hashlib
import tempfile
import threading
import atexit
+import warnings
import requests
import requests.auth
@@ -42,8 +43,14 @@ import ftrack_api_old.structure.origin
import ftrack_api_old.structure.entity_id
import ftrack_api_old.accessor.server
import ftrack_api_old._centralized_storage_scenario
+import ftrack_api_old.logging
from ftrack_api_old.logging import LazyLogMessage as L
+try:
+ from weakref import WeakMethod
+except ImportError:
+ from ftrack_api_old._weakref import WeakMethod
+
class SessionAuthentication(requests.auth.AuthBase):
'''Attach ftrack session authentication information to requests.'''
@@ -69,7 +76,7 @@ class Session(object):
def __init__(
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
plugin_paths=None, cache=None, cache_key_maker=None,
- auto_connect_event_hub=True, schema_cache_path=None,
+ auto_connect_event_hub=None, schema_cache_path=None,
plugin_arguments=None
):
'''Initialise session.
@@ -233,7 +240,8 @@ class Session(object):
self._api_key
)
- if auto_connect_event_hub:
+ self._auto_connect_event_hub_thread = None
+ if auto_connect_event_hub in (None, True):
# Connect to event hub in background thread so as not to block main
# session usage waiting for event hub connection.
self._auto_connect_event_hub_thread = threading.Thread(
@@ -242,8 +250,14 @@ class Session(object):
self._auto_connect_event_hub_thread.daemon = True
self._auto_connect_event_hub_thread.start()
+ # To help with migration from auto_connect_event_hub default changing
+ # from True to False.
+ self._event_hub._deprecation_warning_auto_connect = (
+ auto_connect_event_hub is None
+ )
+
# Register to auto-close session on exit.
- atexit.register(self.close)
+ atexit.register(WeakMethod(self.close))
self._plugin_paths = plugin_paths
if self._plugin_paths is None:
@@ -271,6 +285,15 @@ class Session(object):
ftrack_api_old._centralized_storage_scenario.register(self)
self._configure_locations()
+ self.event_hub.publish(
+ ftrack_api_old.event.base.Event(
+ topic='ftrack.api.session.ready',
+ data=dict(
+ session=self
+ )
+ ),
+ synchronous=True
+ )
def __enter__(self):
'''Return session as context manager.'''
@@ -389,7 +412,8 @@ class Session(object):
try:
self.event_hub.disconnect()
- self._auto_connect_event_hub_thread.join()
+ if self._auto_connect_event_hub_thread:
+ self._auto_connect_event_hub_thread.join()
except ftrack_api_old.exception.EventHubConnectionError:
pass
@@ -428,6 +452,16 @@ class Session(object):
# Re-configure certain session aspects that may be dependant on cache.
self._configure_locations()
+ self.event_hub.publish(
+ ftrack_api_old.event.base.Event(
+ topic='ftrack.api.session.reset',
+ data=dict(
+ session=self
+ )
+ ),
+ synchronous=True
+ )
+
def auto_populating(self, auto_populate):
'''Temporarily set auto populate to *auto_populate*.
@@ -508,7 +542,7 @@ class Session(object):
'entity_key': entity.get('id')
})
- result = self._call(
+ result = self.call(
[payload]
)
@@ -790,12 +824,13 @@ class Session(object):
}]
# TODO: When should this execute? How to handle background=True?
- results = self._call(batch)
+ results = self.call(batch)
# Merge entities into local cache and return merged entities.
data = []
+ merged = dict()
for entity in results[0]['data']:
- data.append(self.merge(entity))
+ data.append(self._merge_recursive(entity, merged))
return data, results[0]['metadata']
@@ -856,6 +891,48 @@ class Session(object):
else:
return value
+ def _merge_recursive(self, entity, merged=None):
+ '''Merge *entity* and all its attributes recursivly.'''
+ log_debug = self.logger.isEnabledFor(logging.DEBUG)
+
+ if merged is None:
+ merged = {}
+
+ attached = self.merge(entity, merged)
+
+ for attribute in entity.attributes:
+ # Remote attributes.
+ remote_value = attribute.get_remote_value(entity)
+
+ if isinstance(
+ remote_value,
+ (
+ ftrack_api_old.entity.base.Entity,
+ ftrack_api_old.collection.Collection,
+ ftrack_api_old.collection.MappedCollectionProxy
+ )
+ ):
+ log_debug and self.logger.debug(
+ 'Merging remote value for attribute {0}.'.format(attribute)
+ )
+
+ if isinstance(remote_value, ftrack_api_old.entity.base.Entity):
+ self._merge_recursive(remote_value, merged=merged)
+
+ elif isinstance(
+ remote_value, ftrack_api_old.collection.Collection
+ ):
+ for entry in remote_value:
+ self._merge_recursive(entry, merged=merged)
+
+ elif isinstance(
+ remote_value, ftrack_api_old.collection.MappedCollectionProxy
+ ):
+ for entry in remote_value.collection:
+ self._merge_recursive(entry, merged=merged)
+
+ return attached
+
def _merge_entity(self, entity, merged=None):
'''Merge *entity* into session returning merged entity.
@@ -1185,7 +1262,7 @@ class Session(object):
# Process batch.
if batch:
- result = self._call(batch)
+ result = self.call(batch)
# Clear recorded operations.
self.recorded_operations.clear()
@@ -1260,7 +1337,7 @@ class Session(object):
def _fetch_server_information(self):
'''Return server information.'''
- result = self._call([{'action': 'query_server_information'}])
+ result = self.call([{'action': 'query_server_information'}])
return result[0]
def _discover_plugins(self, plugin_arguments=None):
@@ -1362,7 +1439,7 @@ class Session(object):
'Loading schemas from server due to hash not matching.'
'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash
))
- schemas = self._call([{'action': 'query_schemas'}])[0]
+ schemas = self.call([{'action': 'query_schemas'}])[0]
if schema_cache_path:
try:
@@ -1525,8 +1602,24 @@ class Session(object):
synchronous=True
)
+ @ftrack_api_old.logging.deprecation_warning(
+ 'Session._call is now available as public method Session.call. The '
+ 'private method will be removed in version 2.0.'
+ )
def _call(self, data):
- '''Make request to server with *data*.'''
+ '''Make request to server with *data* batch describing the actions.
+
+ .. note::
+
+ This private method is now available as public method
+ :meth:`entity_reference`. This alias remains for backwards
+ compatibility, but will be removed in version 2.0.
+
+ '''
+ return self.call(data)
+
+ def call(self, data):
+ '''Make request to server with *data* batch describing the actions.'''
url = self._server_url + '/api'
headers = {
'content-type': 'application/json',
@@ -1553,7 +1646,7 @@ class Session(object):
'Server reported error in unexpected format. Raw error was: {0}'
.format(response.text)
)
- self.logger.error(error_message)
+ self.logger.exception(error_message)
raise ftrack_api_old.exception.ServerError(error_message)
else:
@@ -1562,7 +1655,7 @@ class Session(object):
error_message = 'Server reported error: {0}({1})'.format(
result['exception'], result['content']
)
- self.logger.error(error_message)
+ self.logger.exception(error_message)
raise ftrack_api_old.exception.ServerError(error_message)
return result
@@ -1620,12 +1713,12 @@ class Session(object):
if "entity_data" in data:
for key, value in data["entity_data"].items():
if isinstance(value, ftrack_api_old.entity.base.Entity):
- data["entity_data"][key] = self._entity_reference(value)
+ data["entity_data"][key] = self.entity_reference(value)
return data
if isinstance(item, ftrack_api_old.entity.base.Entity):
- data = self._entity_reference(item)
+ data = self.entity_reference(item)
with self.auto_populating(True):
@@ -1646,14 +1739,15 @@ class Session(object):
value = attribute.get_local_value(item)
elif entity_attribute_strategy == 'persisted_only':
- value = attribute.get_remote_value(item)
+ if not attribute.computed:
+ value = attribute.get_remote_value(item)
if value is not ftrack_api_old.symbol.NOT_SET:
if isinstance(
attribute, ftrack_api_old.attribute.ReferenceAttribute
):
if isinstance(value, ftrack_api_old.entity.base.Entity):
- value = self._entity_reference(value)
+ value = self.entity_reference(value)
data[attribute.name] = value
@@ -1668,14 +1762,14 @@ class Session(object):
if isinstance(item, ftrack_api_old.collection.Collection):
data = []
for entity in item:
- data.append(self._entity_reference(entity))
+ data.append(self.entity_reference(entity))
return data
raise TypeError('{0!r} is not JSON serializable'.format(item))
- def _entity_reference(self, entity):
- '''Return reference to *entity*.
+ def entity_reference(self, entity):
+ '''Return entity reference that uniquely identifies *entity*.
Return a mapping containing the __entity_type__ of the entity along with
the key, value pairs that make up it's primary key.
@@ -1689,6 +1783,26 @@ class Session(object):
return reference
+ @ftrack_api_old.logging.deprecation_warning(
+ 'Session._entity_reference is now available as public method '
+ 'Session.entity_reference. The private method will be removed '
+ 'in version 2.0.'
+ )
+ def _entity_reference(self, entity):
+ '''Return entity reference that uniquely identifies *entity*.
+
+ Return a mapping containing the __entity_type__ of the entity along
+ with the key, value pairs that make up it's primary key.
+
+ .. note::
+
+ This private method is now available as public method
+ :meth:`entity_reference`. This alias remains for backwards
+ compatibility, but will be removed in version 2.0.
+
+ '''
+ return self.entity_reference(entity)
+
def decode(self, string):
'''Return decoded JSON *string* as Python object.'''
with self.operation_recording(False):
@@ -2016,6 +2130,10 @@ class Session(object):
return availabilities
+ @ftrack_api_old.logging.deprecation_warning(
+ 'Session.delayed_job has been deprecated in favour of session.call. '
+ 'Please refer to the release notes for more information.'
+ )
def delayed_job(self, job_type):
'''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned.
@@ -2033,7 +2151,7 @@ class Session(object):
}
try:
- result = self._call(
+ result = self.call(
[operation]
)[0]
@@ -2070,7 +2188,7 @@ class Session(object):
)
try:
- result = self._call([operation])
+ result = self.call([operation])
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
@@ -2172,7 +2290,7 @@ class Session(object):
}
try:
- result = self._call([operation])
+ result = self.call([operation])
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
@@ -2212,7 +2330,7 @@ class Session(object):
}
try:
- result = self._call([operation])
+ result = self.call([operation])
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
@@ -2258,7 +2376,7 @@ class Session(object):
)
try:
- self._call(operations)
+ self.call(operations)
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
@@ -2306,7 +2424,7 @@ class Session(object):
)
try:
- self._call(operations)
+ self.call(operations)
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
if 'Invalid action u\'send_review_session_invite\'' in error.message:
diff --git a/pype/vendor/ftrack_api_old/symbol.py b/pype/vendor/ftrack_api_old/symbol.py
index 10b3f55bd5..f46760f634 100644
--- a/pype/vendor/ftrack_api_old/symbol.py
+++ b/pype/vendor/ftrack_api_old/symbol.py
@@ -1,6 +1,8 @@
# :coding: utf-8
# :copyright: Copyright (c) 2014 ftrack
+import os
+
class Symbol(object):
'''A constant symbol.'''
@@ -68,8 +70,8 @@ CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b'
#: Identifier of builtin server location.
SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b'
-#: Chunk size used when working with data.
-CHUNK_SIZE = 8192
+#: Chunk size used when working with data, default to 1Mb.
+CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024
#: Symbol representing syncing users with ldap
JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP')
diff --git a/schema/application-1.0.json b/schema/application-1.0.json
new file mode 100644
index 0000000000..e2418037c6
--- /dev/null
+++ b/schema/application-1.0.json
@@ -0,0 +1,68 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:application-1.0",
+ "description": "An application definition.",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "label",
+ "application_dir",
+ "executable"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string"
+ },
+ "label": {
+ "description": "Nice name of application.",
+ "type": "string"
+ },
+ "application_dir": {
+ "description": "Name of directory used for application resources.",
+ "type": "string"
+ },
+ "executable": {
+ "description": "Name of callable executable, this is called to launch the application",
+ "type": "string"
+ },
+ "description": {
+ "description": "Description of application.",
+ "type": "string"
+ },
+ "environment": {
+ "description": "Key/value pairs for environment variables related to this application. Supports lists for paths, such as PYTHONPATH.",
+ "type": "object",
+      "additionalProperties": {
+ "oneOf": [
+ {"type": "string"},
+ {"type": "array", "items": {"type": "string"}}
+ ]
+ }
+ },
+ "default_dirs": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "copy": {
+ "type": "object",
+ "patternProperties": {
+ "^.*$": {
+ "anyOf": [
+ {"type": "string"},
+ {"type": "null"}
+ ]
+ }
+ },
+ "additionalProperties": false
+ }
+ }
+}
diff --git a/schema/asset-1.0.json b/schema/asset-1.0.json
new file mode 100644
index 0000000000..6f3665c628
--- /dev/null
+++ b/schema/asset-1.0.json
@@ -0,0 +1,35 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:asset-1.0",
+ "description": "A unit of data",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "name",
+ "subsets"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string"
+ },
+ "name": {
+ "description": "Name of directory",
+ "type": "string"
+ },
+ "subsets": {
+ "type": "array",
+ "items": {
+ "$ref": "subset.json"
+ }
+ }
+ },
+
+ "definitions": {}
+}
\ No newline at end of file
diff --git a/schema/asset-2.0.json b/schema/asset-2.0.json
new file mode 100644
index 0000000000..066cb33498
--- /dev/null
+++ b/schema/asset-2.0.json
@@ -0,0 +1,55 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:asset-2.0",
+ "description": "A unit of data",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "name",
+ "silo",
+ "data"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string",
+ "enum": ["avalon-core:asset-2.0"],
+ "example": "avalon-core:asset-2.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["asset"],
+ "example": "asset"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "592c33475f8c1b064c4d1696"
+ },
+ "name": {
+ "description": "Name of asset",
+ "type": "string",
+ "pattern": "^[a-zA-Z0-9_.]*$",
+ "example": "Bruce"
+ },
+ "silo": {
+ "description": "Group or container of asset",
+ "type": "string",
+ "example": "assets"
+ },
+ "data": {
+ "description": "Document metadata",
+ "type": "object",
+ "example": {"key": "value"}
+ }
+ },
+
+ "definitions": {}
+}
diff --git a/schema/asset-3.0.json b/schema/asset-3.0.json
new file mode 100644
index 0000000000..a3a22e917b
--- /dev/null
+++ b/schema/asset-3.0.json
@@ -0,0 +1,55 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:asset-3.0",
+ "description": "A unit of data",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "name",
+ "data"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string",
+ "enum": ["avalon-core:asset-3.0", "pype:asset-3.0"],
+ "example": "avalon-core:asset-3.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["asset"],
+ "example": "asset"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "592c33475f8c1b064c4d1696"
+ },
+ "name": {
+ "description": "Name of asset",
+ "type": "string",
+ "pattern": "^[a-zA-Z0-9_.]*$",
+ "example": "Bruce"
+ },
+ "silo": {
+ "description": "Group or container of asset",
+ "type": "string",
+ "pattern": "^[a-zA-Z0-9_.]*$",
+ "example": "assets"
+ },
+ "data": {
+ "description": "Document metadata",
+ "type": "object",
+ "example": {"key": "value"}
+ }
+ },
+
+ "definitions": {}
+}
diff --git a/schema/config-1.0.json b/schema/config-1.0.json
new file mode 100644
index 0000000000..b3c4362f41
--- /dev/null
+++ b/schema/config-1.0.json
@@ -0,0 +1,86 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:config-1.0",
+ "description": "A project configuration.",
+
+ "type": "object",
+
+ "additionalProperties": false,
+ "required": [
+ "template",
+ "tasks",
+ "apps"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string"
+ },
+ "template": {
+ "type": "object",
+ "additionalProperties": false,
+ "patternProperties": {
+ "^.*$": {
+ "type": "string"
+ }
+ }
+ },
+ "tasks": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": {"type": "string"},
+ "icon": {"type": "string"},
+ "group": {"type": "string"},
+ "label": {"type": "string"}
+ },
+ "required": ["name"]
+ }
+ },
+ "apps": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": {"type": "string"},
+ "icon": {"type": "string"},
+ "group": {"type": "string"},
+ "label": {"type": "string"}
+ },
+ "required": ["name"]
+ }
+ },
+ "families": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": {"type": "string"},
+ "icon": {"type": "string"},
+ "label": {"type": "string"},
+ "hideFilter": {"type": "boolean"}
+ },
+ "required": ["name"]
+ }
+ },
+ "groups": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": {"type": "string"},
+ "icon": {"type": "string"},
+ "color": {"type": "string"},
+ "order": {"type": ["integer", "number"]}
+ },
+ "required": ["name"]
+ }
+ },
+ "copy": {
+ "type": "object"
+ }
+ }
+}
diff --git a/schema/container-1.0.json b/schema/container-1.0.json
new file mode 100644
index 0000000000..d9e4e39f7f
--- /dev/null
+++ b/schema/container-1.0.json
@@ -0,0 +1,100 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:container-1.0",
+ "description": "A loaded asset",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "id",
+ "objectName",
+ "name",
+ "author",
+ "loader",
+ "families",
+ "time",
+ "subset",
+ "asset",
+ "representation",
+ "version",
+ "silo",
+ "path",
+ "source"
+ ],
+ "properties": {
+ "id": {
+ "description": "Identifier for finding object in host",
+ "type": "string",
+ "enum": ["pyblish.mindbender.container"],
+ "example": "pyblish.mindbender.container"
+ },
+ "objectName": {
+ "description": "Name of internal object, such as the objectSet in Maya.",
+ "type": "string",
+ "example": "Bruce_:rigDefault_CON"
+ },
+ "name": {
+ "description": "Full name of application object",
+ "type": "string",
+ "example": "modelDefault"
+ },
+ "author": {
+ "description": "Name of the author of the published version",
+ "type": "string",
+ "example": "Marcus Ottosson"
+ },
+ "loader": {
+ "description": "Name of loader plug-in used to produce this container",
+ "type": "string",
+ "example": "ModelLoader"
+ },
+ "families": {
+ "description": "Families associated with the this subset",
+ "type": "string",
+ "example": "mindbender.model"
+ },
+ "time": {
+ "description": "File-system safe, formatted time",
+ "type": "string",
+ "example": "20170329T131545Z"
+ },
+ "subset": {
+ "description": "Name of source subset",
+ "type": "string",
+ "example": "modelDefault"
+ },
+ "asset": {
+ "description": "Name of source asset",
+ "type": "string" ,
+ "example": "Bruce"
+ },
+ "representation": {
+ "description": "Name of source representation",
+ "type": "string" ,
+ "example": ".ma"
+ },
+ "version": {
+ "description": "Version number",
+ "type": "number",
+ "example": 12
+ },
+ "silo": {
+ "description": "Silo of parent asset",
+ "type": "string",
+ "example": "assets"
+ },
+ "path": {
+ "description": "Absolute path on disk",
+ "type": "string",
+ "example": "{root}/assets/Bruce/publish/rigDefault/v002"
+ },
+ "source": {
+ "description": "Absolute path to file from which this version was published",
+ "type": "string",
+ "example": "{root}/assets/Bruce/work/rigging/maya/scenes/rig_v001.ma"
+ }
+ }
+}
diff --git a/schema/container-2.0.json b/schema/container-2.0.json
new file mode 100644
index 0000000000..7b84209ea0
--- /dev/null
+++ b/schema/container-2.0.json
@@ -0,0 +1,59 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:container-2.0",
+ "description": "A loaded asset",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "id",
+ "objectName",
+ "name",
+ "namespace",
+ "loader",
+ "representation"
+ ],
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string",
+ "enum": ["avalon-core:container-2.0", "pype:container-2.0"],
+ "example": "pype:container-2.0"
+ },
+ "id": {
+ "description": "Identifier for finding object in host",
+ "type": "string",
+ "enum": ["pyblish.avalon.container"],
+ "example": "pyblish.avalon.container"
+ },
+ "objectName": {
+ "description": "Name of internal object, such as the objectSet in Maya.",
+ "type": "string",
+ "example": "Bruce_:rigDefault_CON"
+ },
+ "loader": {
+ "description": "Name of loader plug-in used to produce this container",
+ "type": "string",
+ "example": "ModelLoader"
+ },
+ "name": {
+ "description": "Internal object name of container in application",
+ "type": "string",
+ "example": "modelDefault_01"
+ },
+ "namespace": {
+ "description": "Internal namespace of container in application",
+ "type": "string",
+ "example": "Bruce_"
+ },
+ "representation": {
+ "description": "Unique id of representation in database",
+ "type": "string",
+ "example": "59523f355f8c1b5f6c5e8348"
+ }
+ }
+}
\ No newline at end of file
diff --git a/schema/inventory-1.0.json b/schema/inventory-1.0.json
new file mode 100644
index 0000000000..888ba7945a
--- /dev/null
+++ b/schema/inventory-1.0.json
@@ -0,0 +1,10 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+  "title": "pype:inventory-1.0",
+  "description": "A project inventory.",
+
+ "type": "object",
+
+ "additionalProperties": true
+}
diff --git a/schema/project-2.0.json b/schema/project-2.0.json
new file mode 100644
index 0000000000..ad0e460f4d
--- /dev/null
+++ b/schema/project-2.0.json
@@ -0,0 +1,86 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:project-2.0",
+ "description": "A unit of data",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "name",
+ "data",
+ "config"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string",
+ "enum": ["avalon-core:project-2.0", "pype:project-2.0"],
+ "example": "avalon-core:project-2.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["project"],
+ "example": "project"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "592c33475f8c1b064c4d1696"
+ },
+ "name": {
+ "description": "Name of directory",
+ "type": "string",
+ "pattern": "^[a-zA-Z0-9_.]*$",
+ "example": "hulk"
+ },
+ "data": {
+ "description": "Document metadata",
+ "type": "object",
+ "example": {
+ "fps": 24,
+ "width": 1920,
+ "height": 1080
+ }
+ },
+ "config": {
+ "type": "object",
+ "description": "Document metadata",
+ "example": {
+ "schema": "pype:config-1.0",
+ "apps": [
+ {
+ "name": "maya2016",
+ "label": "Autodesk Maya 2016"
+ },
+ {
+ "name": "nuke10",
+ "label": "The Foundry Nuke 10.0"
+ }
+ ],
+ "tasks": [
+ {"name": "model"},
+ {"name": "render"},
+ {"name": "animate"},
+ {"name": "rig"},
+ {"name": "lookdev"},
+ {"name": "layout"}
+ ],
+ "template": {
+ "work":
+ "{root}/{project}/{silo}/{asset}/work/{task}/{app}",
+ "publish":
+ "{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/{subset}.{representation}"
+ }
+ },
+ "$ref": "config-1.0.json"
+ }
+ },
+
+ "definitions": {}
+}
diff --git a/schema/representation-1.0.json b/schema/representation-1.0.json
new file mode 100644
index 0000000000..10ae72928e
--- /dev/null
+++ b/schema/representation-1.0.json
@@ -0,0 +1,28 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:representation-1.0",
+ "description": "The inverse of an instance",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "format",
+ "path"
+ ],
+
+ "properties": {
+ "schema": {"type": "string"},
+ "format": {
+ "description": "File extension, including '.'",
+ "type": "string"
+ },
+ "path": {
+ "description": "Unformatted path to version.",
+ "type": "string"
+ }
+ }
+}
diff --git a/schema/representation-2.0.json b/schema/representation-2.0.json
new file mode 100644
index 0000000000..e12dea8564
--- /dev/null
+++ b/schema/representation-2.0.json
@@ -0,0 +1,78 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:representation-2.0",
+ "description": "The inverse of an instance",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "parent",
+ "name",
+ "data"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string",
+ "enum": ["avalon-core:representation-2.0", "pype:representation-2.0"],
+ "example": "pype:representation-2.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["representation"],
+ "example": "representation"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "592c33475f8c1b064c4d1696"
+ },
+ "name": {
+ "description": "Name of representation",
+ "type": "string",
+ "pattern": "^[a-zA-Z0-9_.]*$",
+ "example": "abc"
+ },
+ "data": {
+ "description": "Document metadata",
+ "type": "object",
+ "example": {
+ "label": "Alembic"
+ }
+ },
+ "dependencies": {
+ "description": "Other representation that this representation depends on",
+ "type": "array",
+ "items": {"type": "string"},
+ "example": [
+ "592d547a5f8c1b388093c145"
+ ]
+ },
+ "context": {
+ "description": "Summary of the context to which this representation belong.",
+ "type": "object",
+ "properties": {
+ "project": {"type": "object"},
+ "asset": {"type": "string"},
+ "silo": {"type": ["string", "null"]},
+ "subset": {"type": "string"},
+ "version": {"type": "number"},
+ "representation": {"type": "string"}
+ },
+ "example": {
+ "project": "hulk",
+ "asset": "Bruce",
+ "silo": "assets",
+ "subset": "rigDefault",
+ "version": 12,
+ "representation": "ma"
+ }
+ }
+ }
+}
diff --git a/schema/session-1.0.json b/schema/session-1.0.json
new file mode 100644
index 0000000000..2b201f9c61
--- /dev/null
+++ b/schema/session-1.0.json
@@ -0,0 +1,143 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:session-1.0",
+ "description": "The Avalon environment",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "AVALON_PROJECTS",
+ "AVALON_PROJECT",
+ "AVALON_ASSET",
+ "AVALON_SILO",
+ "AVALON_CONFIG"
+ ],
+
+ "properties": {
+ "AVALON_PROJECTS": {
+ "description": "Absolute path to root of project directories",
+ "type": "string",
+ "example": "/nas/projects"
+ },
+ "AVALON_PROJECT": {
+ "description": "Name of project",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "Hulk"
+ },
+ "AVALON_ASSET": {
+ "description": "Name of asset",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "Bruce"
+ },
+ "AVALON_SILO": {
+ "description": "Name of asset group or container",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "assets"
+ },
+ "AVALON_TASK": {
+ "description": "Name of task",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "modeling"
+ },
+ "AVALON_CONFIG": {
+ "description": "Name of Avalon configuration",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "polly"
+ },
+ "AVALON_APP": {
+ "description": "Name of application",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "maya2016"
+ },
+ "AVALON_MONGO": {
+ "description": "Address to the asset database",
+ "type": "string",
+ "pattern": "^mongodb://[\\w/@:.]*$",
+ "example": "mongodb://localhost:27017",
+ "default": "mongodb://localhost:27017"
+ },
+ "AVALON_DB": {
+ "description": "Name of database",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "avalon",
+ "default": "avalon"
+ },
+ "AVALON_LABEL": {
+ "description": "Nice name of Avalon, used in e.g. graphical user interfaces",
+ "type": "string",
+ "example": "Mindbender",
+ "default": "Avalon"
+ },
+ "AVALON_SENTRY": {
+ "description": "Address to Sentry",
+ "type": "string",
+ "pattern": "^http[\\w/@:.]*$",
+ "example": "https://5b872b280de742919b115bdc8da076a5:8d278266fe764361b8fa6024af004a9c@logs.mindbender.com/2",
+ "default": null
+ },
+ "AVALON_DEADLINE": {
+ "description": "Address to Deadline",
+ "type": "string",
+ "pattern": "^http[\\w/@:.]*$",
+ "example": "http://192.168.99.101",
+ "default": null
+ },
+ "AVALON_TIMEOUT": {
+ "description": "Wherever there is a need for a timeout, this is the default value.",
+ "type": "string",
+ "pattern": "^[0-9]*$",
+ "default": "1000",
+ "example": "1000"
+ },
+ "AVALON_UPLOAD": {
+ "description": "Boolean of whether to upload published material to central asset repository",
+ "type": "string",
+ "default": null,
+ "example": "True"
+ },
+ "AVALON_USERNAME": {
+ "description": "Generic username",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "default": "avalon",
+ "example": "myself"
+ },
+ "AVALON_PASSWORD": {
+ "description": "Generic password",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "default": "secret",
+ "example": "abc123"
+ },
+ "AVALON_INSTANCE_ID": {
+ "description": "Unique identifier for instances in a working file",
+ "type": "string",
+ "pattern": "^[\\w.]*$",
+ "default": "avalon.instance",
+ "example": "avalon.instance"
+ },
+ "AVALON_CONTAINER_ID": {
+ "description": "Unique identifier for a loaded representation in a working file",
+ "type": "string",
+ "pattern": "^[\\w.]*$",
+ "default": "avalon.container",
+ "example": "avalon.container"
+ },
+ "AVALON_DEBUG": {
+ "description": "Enable debugging mode. Some applications may use this for e.g. extended verbosity or mock plug-ins.",
+ "type": "string",
+ "default": null,
+ "example": "True"
+ }
+ }
+}
\ No newline at end of file
diff --git a/schema/session-2.0.json b/schema/session-2.0.json
new file mode 100644
index 0000000000..006a9e2dbf
--- /dev/null
+++ b/schema/session-2.0.json
@@ -0,0 +1,142 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:session-2.0",
+ "description": "The Avalon environment",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "AVALON_PROJECTS",
+ "AVALON_PROJECT",
+ "AVALON_ASSET",
+ "AVALON_CONFIG"
+ ],
+
+ "properties": {
+ "AVALON_PROJECTS": {
+ "description": "Absolute path to root of project directories",
+ "type": "string",
+ "example": "/nas/projects"
+ },
+ "AVALON_PROJECT": {
+ "description": "Name of project",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "Hulk"
+ },
+ "AVALON_ASSET": {
+ "description": "Name of asset",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "Bruce"
+ },
+ "AVALON_SILO": {
+ "description": "Name of asset group or container",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "assets"
+ },
+ "AVALON_TASK": {
+ "description": "Name of task",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "modeling"
+ },
+ "AVALON_CONFIG": {
+ "description": "Name of Avalon configuration",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "polly"
+ },
+ "AVALON_APP": {
+ "description": "Name of application",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "maya2016"
+ },
+ "AVALON_MONGO": {
+ "description": "Address to the asset database",
+ "type": "string",
+ "pattern": "^mongodb://[\\w/@:.]*$",
+ "example": "mongodb://localhost:27017",
+ "default": "mongodb://localhost:27017"
+ },
+ "AVALON_DB": {
+ "description": "Name of database",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "example": "avalon",
+ "default": "avalon"
+ },
+ "AVALON_LABEL": {
+ "description": "Nice name of Avalon, used in e.g. graphical user interfaces",
+ "type": "string",
+ "example": "Mindbender",
+ "default": "Avalon"
+ },
+ "AVALON_SENTRY": {
+ "description": "Address to Sentry",
+ "type": "string",
+ "pattern": "^http[\\w/@:.]*$",
+ "example": "https://5b872b280de742919b115bdc8da076a5:8d278266fe764361b8fa6024af004a9c@logs.mindbender.com/2",
+ "default": null
+ },
+ "AVALON_DEADLINE": {
+ "description": "Address to Deadline",
+ "type": "string",
+ "pattern": "^http[\\w/@:.]*$",
+ "example": "http://192.168.99.101",
+ "default": null
+ },
+ "AVALON_TIMEOUT": {
+ "description": "Wherever there is a need for a timeout, this is the default value.",
+ "type": "string",
+ "pattern": "^[0-9]*$",
+ "default": "1000",
+ "example": "1000"
+ },
+ "AVALON_UPLOAD": {
+ "description": "Boolean of whether to upload published material to central asset repository",
+ "type": "string",
+ "default": null,
+ "example": "True"
+ },
+ "AVALON_USERNAME": {
+ "description": "Generic username",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "default": "avalon",
+ "example": "myself"
+ },
+ "AVALON_PASSWORD": {
+ "description": "Generic password",
+ "type": "string",
+ "pattern": "^\\w*$",
+ "default": "secret",
+ "example": "abc123"
+ },
+ "AVALON_INSTANCE_ID": {
+ "description": "Unique identifier for instances in a working file",
+ "type": "string",
+ "pattern": "^[\\w.]*$",
+ "default": "avalon.instance",
+ "example": "avalon.instance"
+ },
+ "AVALON_CONTAINER_ID": {
+ "description": "Unique identifier for a loaded representation in a working file",
+ "type": "string",
+ "pattern": "^[\\w.]*$",
+ "default": "avalon.container",
+ "example": "avalon.container"
+ },
+ "AVALON_DEBUG": {
+ "description": "Enable debugging mode. Some applications may use this for e.g. extended verbosity or mock plug-ins.",
+ "type": "string",
+ "default": null,
+ "example": "True"
+ }
+ }
+}
diff --git a/schema/shaders-1.0.json b/schema/shaders-1.0.json
new file mode 100644
index 0000000000..e66cc735e8
--- /dev/null
+++ b/schema/shaders-1.0.json
@@ -0,0 +1,32 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:shaders-1.0",
+ "description": "Relationships between shaders and Avalon IDs",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "shader"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string"
+ },
+ "shader": {
+ "description": "Name of directory",
+ "type": "array",
+ "items": {
+        "type": "string",
+ "description": "Avalon ID and optional face indexes, e.g. 'f9520572-ac1d-11e6-b39e-3085a99791c9.f[5002:5185]'"
+ }
+ }
+ },
+
+ "definitions": {}
+}
diff --git a/schema/subset-1.0.json b/schema/subset-1.0.json
new file mode 100644
index 0000000000..90ae0349fa
--- /dev/null
+++ b/schema/subset-1.0.json
@@ -0,0 +1,35 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:subset-1.0",
+ "description": "A container of instances",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "name",
+ "versions"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "Schema identifier for payload",
+ "type": "string"
+ },
+ "name": {
+ "description": "Name of directory",
+ "type": "string"
+ },
+ "versions": {
+ "type": "array",
+ "items": {
+ "$ref": "version.json"
+ }
+ }
+ },
+
+ "definitions": {}
+}
\ No newline at end of file
diff --git a/schema/subset-2.0.json b/schema/subset-2.0.json
new file mode 100644
index 0000000000..98f39c4f3e
--- /dev/null
+++ b/schema/subset-2.0.json
@@ -0,0 +1,51 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:subset-2.0",
+ "description": "A container of instances",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "parent",
+ "name",
+ "data"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "The schema associated with this document",
+ "type": "string",
+ "enum": ["pype:subset-2.0"],
+ "example": "pype:subset-2.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["subset"],
+ "example": "subset"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "592c33475f8c1b064c4d1696"
+ },
+ "name": {
+ "description": "Name of directory",
+ "type": "string",
+ "pattern": "^[a-zA-Z0-9_.]*$",
+ "example": "shot01"
+ },
+ "data": {
+ "type": "object",
+ "description": "Document metadata",
+ "example": {
+ "frameStart": 1000,
+ "frameEnd": 1201
+ }
+ }
+ }
+}
diff --git a/schema/subset-3.0.json b/schema/subset-3.0.json
new file mode 100644
index 0000000000..a0af9d340f
--- /dev/null
+++ b/schema/subset-3.0.json
@@ -0,0 +1,62 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:subset-3.0",
+ "description": "A container of instances",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "parent",
+ "name",
+ "data"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "The schema associated with this document",
+ "type": "string",
+ "enum": ["avalon-core:subset-3.0", "pype:subset-3.0"],
+ "example": "pype:subset-3.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["subset"],
+ "example": "subset"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "592c33475f8c1b064c4d1696"
+ },
+ "name": {
+ "description": "Name of directory",
+ "type": "string",
+ "pattern": "^[a-zA-Z0-9_.]*$",
+ "example": "shot01"
+ },
+ "data": {
+ "description": "Document metadata",
+ "type": "object",
+ "required": ["families"],
+ "properties": {
+ "families": {
+ "type": "array",
+ "items": {"type": "string"},
+ "description": "One or more families associated with this subset"
+ }
+ },
+ "example": {
+ "families" : [
+ "avalon.camera"
+ ],
+ "frameStart": 1000,
+ "frameEnd": 1201
+ }
+ }
+ }
+}
diff --git a/schema/thumbnail-1.0.json b/schema/thumbnail-1.0.json
new file mode 100644
index 0000000000..96b540ab7e
--- /dev/null
+++ b/schema/thumbnail-1.0.json
@@ -0,0 +1,42 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:thumbnail-1.0",
+ "description": "Entity with thumbnail data",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "data"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "The schema associated with this document",
+ "type": "string",
+ "enum": ["pype:thumbnail-1.0"],
+ "example": "pype:thumbnail-1.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["thumbnail"],
+ "example": "thumbnail"
+ },
+ "data": {
+ "description": "Thumbnail data",
+ "type": "object",
+ "example": {
+ "binary_data": "Binary({byte data of image})",
+                "template": "{thumbnail_root}/{project[name]}/{_id}{ext}",
+ "template_data": {
+ "ext": ".jpg"
+ }
+ }
+ }
+ }
+}
diff --git a/schema/version-1.0.json b/schema/version-1.0.json
new file mode 100644
index 0000000000..c784a25175
--- /dev/null
+++ b/schema/version-1.0.json
@@ -0,0 +1,50 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:version-1.0",
+ "description": "An individual version",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "version",
+ "path",
+ "time",
+ "author",
+ "source",
+ "representations"
+ ],
+
+ "properties": {
+ "schema": {"type": "string"},
+ "representations": {
+ "type": "array",
+ "items": {
+ "$ref": "representation.json"
+ }
+ },
+ "time": {
+ "description": "ISO formatted, file-system compatible time",
+ "type": "string"
+ },
+ "author": {
+ "description": "User logged on to the machine at time of publish",
+ "type": "string"
+ },
+ "version": {
+ "description": "Number of this version",
+ "type": "number"
+ },
+ "path": {
+            "description": "Unformatted path, e.g. '{root}/assets/Bruce/publish/lookdevDefault/v001'",
+ "type": "string"
+ },
+ "source": {
+ "description": "Original file from which this version was made.",
+ "type": "string"
+ }
+ }
+}
diff --git a/schema/version-2.0.json b/schema/version-2.0.json
new file mode 100644
index 0000000000..5bb4a56f96
--- /dev/null
+++ b/schema/version-2.0.json
@@ -0,0 +1,92 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:version-2.0",
+ "description": "An individual version",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "parent",
+ "name",
+ "data"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "The schema associated with this document",
+ "type": "string",
+ "enum": ["pype:version-2.0"],
+ "example": "pype:version-2.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["version"],
+ "example": "version"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "592c33475f8c1b064c4d1696"
+ },
+ "name": {
+ "description": "Number of version",
+ "type": "number",
+ "example": 12
+ },
+ "locations": {
+ "description": "Where on the planet this version can be found.",
+ "type": "array",
+ "items": {"type": "string"},
+ "example": ["data.avalon.com"]
+ },
+ "data": {
+ "description": "Document metadata",
+ "type": "object",
+ "required": ["families", "author", "source", "time"],
+ "properties": {
+ "time": {
+ "description": "ISO formatted, file-system compatible time",
+ "type": "string"
+ },
+ "timeFormat": {
+ "description": "ISO format of time",
+ "type": "string"
+ },
+ "author": {
+ "description": "User logged on to the machine at time of publish",
+ "type": "string"
+ },
+ "version": {
+ "description": "Number of this version",
+ "type": "number"
+ },
+ "path": {
+                    "description": "Unformatted path, e.g. '{root}/assets/Bruce/publish/lookdevDefault/v001'",
+ "type": "string"
+ },
+ "source": {
+ "description": "Original file from which this version was made.",
+ "type": "string"
+ },
+ "families": {
+ "type": "array",
+ "items": {"type": "string"},
+ "description": "One or more families associated with this version"
+ }
+ },
+ "example": {
+ "source" : "{root}/f02_prod/assets/BubbleWitch/work/modeling/marcus/maya/scenes/model_v001.ma",
+ "author" : "marcus",
+ "families" : [
+ "avalon.model"
+ ],
+ "time" : "20170510T090203Z"
+ }
+ }
+ }
+}
diff --git a/schema/version-3.0.json b/schema/version-3.0.json
new file mode 100644
index 0000000000..808650da0d
--- /dev/null
+++ b/schema/version-3.0.json
@@ -0,0 +1,84 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:version-3.0",
+ "description": "An individual version",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "schema",
+ "type",
+ "parent",
+ "name",
+ "data"
+ ],
+
+ "properties": {
+ "schema": {
+ "description": "The schema associated with this document",
+ "type": "string",
+ "enum": ["avalon-core:version-3.0", "pype:version-3.0"],
+ "example": "pype:version-3.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["version"],
+ "example": "version"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "592c33475f8c1b064c4d1696"
+ },
+ "name": {
+ "description": "Number of version",
+ "type": "number",
+ "example": 12
+ },
+ "locations": {
+ "description": "Where on the planet this version can be found.",
+ "type": "array",
+ "items": {"type": "string"},
+ "example": ["data.avalon.com"]
+ },
+ "data": {
+ "description": "Document metadata",
+ "type": "object",
+ "required": ["author", "source", "time"],
+ "properties": {
+ "time": {
+ "description": "ISO formatted, file-system compatible time",
+ "type": "string"
+ },
+ "timeFormat": {
+ "description": "ISO format of time",
+ "type": "string"
+ },
+ "author": {
+ "description": "User logged on to the machine at time of publish",
+ "type": "string"
+ },
+ "version": {
+ "description": "Number of this version",
+ "type": "number"
+ },
+ "path": {
+                    "description": "Unformatted path, e.g. '{root}/assets/Bruce/publish/lookdevDefault/v001'",
+ "type": "string"
+ },
+ "source": {
+ "description": "Original file from which this version was made.",
+ "type": "string"
+ }
+ },
+ "example": {
+ "source" : "{root}/f02_prod/assets/BubbleWitch/work/modeling/marcus/maya/scenes/model_v001.ma",
+ "author" : "marcus",
+ "time" : "20170510T090203Z"
+ }
+ }
+ }
+}