Merge branch 'develop' into feature/PYPE-657-extract-review-sequence-support

This commit is contained in:
Milan Kolar 2020-01-31 22:59:20 +01:00
commit 8ece466d9f
87 changed files with 2523 additions and 677 deletions

View file

@ -312,42 +312,32 @@ class Delivery(BaseAction):
anatomy_data = copy.deepcopy(repre["context"])
anatomy_data["root"] = location_path
anatomy_filled = anatomy.format(anatomy_data)
test_path = (
anatomy_filled
.get("delivery", {})
.get(anatomy_name)
)
anatomy_filled = anatomy.format_all(anatomy_data)
test_path = anatomy_filled["delivery"][anatomy_name]
if not test_path:
if not test_path.solved:
msg = (
"Missing keys in Representation's context"
" for anatomy template \"{}\"."
).format(anatomy_name)
all_anatomies = anatomy.format_all(anatomy_data)
result = None
for anatomies in all_anatomies.values():
for key, temp in anatomies.get("delivery", {}).items():
if key != anatomy_name:
continue
if test_path.missing_keys:
keys = ", ".join(test_path.missing_keys)
sub_msg = (
"Representation: {}<br>- Missing keys: \"{}\"<br>"
).format(str(repre["_id"]), keys)
result = temp
break
if test_path.invalid_types:
items = []
for key, value in test_path.invalid_types.items():
items.append("\"{}\" {}".format(key, str(value)))
# TODO log error! - missing keys in anatomy
if result:
missing_keys = [
key[1] for key in string.Formatter().parse(result)
if key[1] is not None
]
else:
missing_keys = ["unknown"]
keys = ", ".join(items)
sub_msg = (
"Representation: {}<br>"
"- Invalid value DataType: \"{}\"<br>"
).format(str(repre["_id"]), keys)
keys = ", ".join(missing_keys)
sub_msg = (
"Representation: {}<br>- Missing keys: \"{}\"<br>"
).format(str(repre["_id"]), keys)
self.report_items[msg].append(sub_msg)
self.log.warning(
"{} Representation: \"{}\" Filled: <{}>".format(

View file

@ -31,7 +31,7 @@ class SyncToAvalonEvent(BaseEvent):
"timelog", "auth_userrole", "appointment"
]
ignore_ent_types = ["Milestone"]
ignore_keys = ["statusid"]
ignore_keys = ["statusid", "thumbid"]
project_query = (
"select full_name, name, custom_attributes"
@ -486,6 +486,14 @@ class SyncToAvalonEvent(BaseEvent):
action = ent_info["action"]
ftrack_id = ent_info["entityId"]
if isinstance(ftrack_id, list):
self.log.warning((
"BUG REPORT: Entity info has `entityId` as `list` \"{}\""
).format(ent_info))
if len(ftrack_id) == 0:
continue
ftrack_id = ftrack_id[0]
if action == "move":
ent_keys = ent_info["keys"]
# Separate update info from move action
@ -1820,6 +1828,13 @@ class SyncToAvalonEvent(BaseEvent):
obj_type_id = ent_info["objectTypeId"]
ent_cust_attrs = cust_attrs_by_obj_id.get(obj_type_id)
if ent_cust_attrs is None:
self.log.warning((
"BUG REPORT: Entity has ent type without"
" custom attributes <{}> \"{}\""
).format(entType, ent_info))
continue
for key, values in ent_info["changes"].items():
if key in hier_attrs_keys:
self.hier_cust_attrs_changes[key].append(ftrack_id)

View file

@ -207,7 +207,9 @@ class UserAssigmentEvent(BaseEvent):
# formatting work dir is easiest part as we can use whole path
work_dir = anatomy.format(data)['avalon']['work']
# we also need publish but not whole
publish = anatomy.format_all(data)['partial']['avalon']['publish']
filled_all = anatomy.format_all(data)
publish = filled_all['avalon']['publish']
# now find path to {asset}
m = re.search("(^.+?{})".format(data['asset']),
publish)

View file

@ -1,4 +1,5 @@
import os
import sys
import time
import socket
import threading
@ -52,8 +53,7 @@ class SocketThread(threading.Thread):
)
self.subproc = subprocess.Popen(
["python", self.filepath, "-port", str(self.port)],
stdout=subprocess.PIPE
[sys.executable, self.filepath, "-port", str(self.port)]
)
# Listen for incoming connections
@ -115,11 +115,6 @@ class SocketThread(threading.Thread):
if self.subproc.poll() is None:
self.subproc.terminate()
lines = self.subproc.stdout.readlines()
if lines:
print("*** Socked Thread stdout ***")
for line in lines:
os.write(1, line)
self.finished = True
def get_data_from_con(self, connection):

View file

@ -2,12 +2,14 @@ import sys
import signal
import socket
import traceback
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub
from pypeapp import Logger
log = Logger().get_logger(__name__)
log = Logger().get_logger("FtrackUserServer")
def main(args):
@ -18,7 +20,9 @@ def main(args):
# Connect the socket to the port where the server is listening
server_address = ("localhost", port)
log.debug("Storer connected to {} port {}".format(*server_address))
log.debug(
"User Ftrack Server connected to {} port {}".format(*server_address)
)
sock.connect(server_address)
sock.sendall(b"CreatedUser")
@ -27,8 +31,10 @@ def main(args):
auto_connect_event_hub=True, sock=sock, Eventhub=UserEventHub
)
server = FtrackServer("action")
log.debug("Launched Ftrack Event storer")
log.debug("Launched User Ftrack Server")
server.run_server(session=session)
except Exception:
traceback.print_exception(*sys.exc_info())
finally:
log.debug("Closing socket")
@ -42,7 +48,6 @@ if __name__ == "__main__":
log.info(
"Process was forced to stop. Process ended."
)
log.info("Process ended.")
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)

View file

@ -1722,7 +1722,11 @@ class SyncEntitiesFactory:
self.avalon_project_id = new_id
self._avalon_ents_by_id[str(new_id)] = project_item
if self._avalon_ents_by_ftrack_id is None:
self._avalon_ents_by_ftrack_id = {}
self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id)
if self._avalon_ents_by_name is None:
self._avalon_ents_by_name = {}
self._avalon_ents_by_name[project_item["name"]] = str(new_id)
self.create_list.append(project_item)
@ -1991,7 +1995,7 @@ class SyncEntitiesFactory:
vis_par = ent["data"]["visualParent"]
if (
vis_par is not None and
str(vis_par) in self.deleted_entities
str(vis_par) in _deleted_entities
):
continue
_ready.append(mongo_id)

View file

@ -171,7 +171,7 @@ class FtrackModule:
# If thread failed test Ftrack and Mongo connection
elif not self.thread_socket_server.isAlive():
self.thread_socket_server_thread.join()
self.thread_socket_server.join()
self.thread_socket_server = None
ftrack_accessible = False

View file

@ -162,6 +162,7 @@ def on_open(_):
# Validate FPS after update_task_from_path to
# ensure it is using correct FPS for the asset
lib.validate_fps()
lib.fix_incompatible_containers()
if any_outdated():
log.warning("Scene has outdated content.")

View file

@ -2318,6 +2318,25 @@ def get_attr_in_layer(attr, layer):
return cmds.getAttr(attr)
def fix_incompatible_containers():
"""Switch containers using incompatible loaders over to ReferenceLoader"""
host = avalon.api.registered_host()
for container in host.ls():
loader = container['loader']
print(container['loader'])
if loader in ["MayaAsciiLoader",
"AbcLoader",
"ModelLoader",
"CameraLoader",
"RigLoader",
"FBXLoader"]:
cmds.setAttr(container["objectName"] + ".loader",
"ReferenceLoader", type="string")
def _null(*args):
pass

View file

@ -15,12 +15,13 @@ log = logging.getLogger(__name__)
def _get_menu():
"""Return the menu instance if it currently exists in Maya"""
app = QtWidgets.QApplication.instance()
widgets = dict((w.objectName(), w) for w in app.allWidgets())
widgets = dict((
w.objectName(), w) for w in QtWidgets.QApplication.allWidgets())
menu = widgets.get(self._menu)
return menu
def deferred():
log.info("Attempting to install scripts menu..")

View file

@ -33,40 +33,41 @@ if os.getenv("PYBLISH_GUI", None):
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))
class NukeHandler(logging.Handler):
'''
Nuke Handler - emits logs into nuke's script editor.
warning will emit nuke.warning()
critical and fatal would popup msg dialog to alert of the error.
'''
# class NukeHandler(logging.Handler):
# '''
# Nuke Handler - emits logs into nuke's script editor.
# warning will emit nuke.warning()
# critical and fatal would popup msg dialog to alert of the error.
# '''
#
# def __init__(self):
# logging.Handler.__init__(self)
# self.set_name("Pype_Nuke_Handler")
#
# def emit(self, record):
# # Formatted message:
# msg = self.format(record)
#
# if record.levelname.lower() in [
# # "warning",
# "critical",
# "fatal",
# "error"
# ]:
# msg = self.format(record)
# nuke.message(msg)
#
#
# '''Adding Nuke Logging Handler'''
# log.info([handler.get_name() for handler in logging.root.handlers[:]])
# nuke_handler = NukeHandler()
# if nuke_handler.get_name() \
# not in [handler.get_name()
# for handler in logging.root.handlers[:]]:
# logging.getLogger().addHandler(nuke_handler)
# logging.getLogger().setLevel(logging.INFO)
# log.info([handler.get_name() for handler in logging.root.handlers[:]])
def __init__(self):
logging.Handler.__init__(self)
self.set_name("Pype_Nuke_Handler")
def emit(self, record):
# Formatted message:
msg = self.format(record)
if record.levelname.lower() in [
# "warning",
"critical",
"fatal",
"error"
]:
msg = self.format(record)
nuke.message(msg)
'''Adding Nuke Logging Handler'''
log.info([handler.get_name() for handler in logging.root.handlers[:]])
nuke_handler = NukeHandler()
if nuke_handler.get_name() \
not in [handler.get_name()
for handler in logging.root.handlers[:]]:
logging.getLogger().addHandler(nuke_handler)
logging.getLogger().setLevel(logging.INFO)
log.info([handler.get_name() for handler in logging.root.handlers[:]])
def reload_config():
"""Attempt to reload pipeline at run-time.
@ -113,7 +114,7 @@ def install():
family_states = [
"write",
"review",
"nukenodes"
"nukenodes"
"gizmo"
]

View file

@ -21,7 +21,6 @@ from .presets import (
from .presets import (
get_anatomy
)
# TODO: remove get_anatomy and import directly Anatomy() here
from pypeapp import Logger
log = Logger().get_logger(__name__, "nuke")
@ -50,8 +49,6 @@ def checkInventoryVersions():
and check if the node is having actual version. If not then it will color
it to red.
"""
# TODO: make it for all nodes not just Read (Loader
# get all Loader nodes by avalon attribute metadata
for each in nuke.allNodes():
if each.Class() == 'Read':
@ -93,7 +90,6 @@ def checkInventoryVersions():
def writes_version_sync():
''' Callback synchronizing version of publishable write nodes
'''
# TODO: make it work with new write node group
try:
rootVersion = pype.get_version_from_path(nuke.root().name())
padding = len(rootVersion)
@ -130,7 +126,8 @@ def writes_version_sync():
os.makedirs(os.path.dirname(node_new_file), 0o766)
except Exception as e:
log.warning(
"Write node: `{}` has no version in path: {}".format(each.name(), e))
"Write node: `{}` has no version in path: {}".format(
each.name(), e))
def version_up_script():
@ -183,9 +180,12 @@ def format_anatomy(data):
try:
padding = int(anatomy.templates['render']['padding'])
except KeyError as e:
log.error("`padding` key is not in `render` "
"Anatomy template. Please, add it there and restart "
"the pipeline (padding: \"4\"): `{}`".format(e))
msg = ("`padding` key is not in `render` "
"Anatomy template. Please, add it there and restart "
"the pipeline (padding: \"4\"): `{}`").format(e)
log.error(msg)
nuke.message(msg)
version = data.get("version", None)
if not version:
@ -265,7 +265,9 @@ def create_write_node(name, data, input=None, prenodes=None):
anatomy_filled = format_anatomy(data)
except Exception as e:
log.error("problem with resolving anatomy template: {}".format(e))
msg = "problem with resolving anatomy template: {}".format(e)
log.error(msg)
nuke.message(msg)
# build file path to workfiles
fpath = str(anatomy_filled["work"]["folder"]).replace("\\", "/")
@ -543,8 +545,11 @@ class WorkfileSettings(object):
viewer_dict (dict): adjustments from presets
'''
assert isinstance(viewer_dict, dict), log.error(
"set_viewers_colorspace(): argument should be dictionary")
if not isinstance(viewer_dict, dict):
msg = "set_viewers_colorspace(): argument should be dictionary"
log.error(msg)
nuke.message(msg)
return
filter_knobs = [
"viewerProcess",
@ -592,8 +597,10 @@ class WorkfileSettings(object):
root_dict (dict): adjustmensts from presets
'''
assert isinstance(root_dict, dict), log.error(
"set_root_colorspace(): argument should be dictionary")
if not isinstance(root_dict, dict):
msg = "set_root_colorspace(): argument should be dictionary"
log.error(msg)
nuke.message(msg)
log.debug(">> root_dict: {}".format(root_dict))
@ -640,8 +647,11 @@ class WorkfileSettings(object):
'''
# TODO: complete this function so any write node in
# scene will have fixed colorspace following presets for the project
assert isinstance(write_dict, dict), log.error(
"set_root_colorspace(): argument should be dictionary")
if not isinstance(write_dict, dict):
msg = "set_root_colorspace(): argument should be dictionary"
nuke.message(msg)
log.error(msg)
return
log.debug("__ set_writes_colorspace(): {}".format(write_dict))
@ -653,25 +663,28 @@ class WorkfileSettings(object):
try:
self.set_root_colorspace(nuke_colorspace["root"])
except AttributeError:
log.error(
"set_colorspace(): missing `root` settings in template")
msg = "set_colorspace(): missing `root` settings in template"
try:
self.set_viewers_colorspace(nuke_colorspace["viewer"])
except AttributeError:
log.error(
"set_colorspace(): missing `viewer` settings in template")
msg = "set_colorspace(): missing `viewer` settings in template"
nuke.message(msg)
log.error(msg)
try:
self.set_writes_colorspace(nuke_colorspace["write"])
except AttributeError:
log.error(
"set_colorspace(): missing `write` settings in template")
msg = "set_colorspace(): missing `write` settings in template"
nuke.message(msg)
log.error(msg)
try:
for key in nuke_colorspace:
log.debug("Preset's colorspace key: {}".format(key))
except TypeError:
log.error("Nuke is not in templates! \n\n\n"
"contact your supervisor!")
msg = "Nuke is not in templates! Contact your supervisor!"
nuke.message(msg)
log.error(msg)
def reset_frame_range_handles(self):
"""Set frame range to current asset"""
@ -758,13 +771,13 @@ class WorkfileSettings(object):
}
if any(x for x in data.values() if x is None):
log.error(
"Missing set shot attributes in DB."
"\nContact your supervisor!."
"\n\nWidth: `{width}`"
"\nHeight: `{height}`"
"\nPixel Aspect: `{pixel_aspect}`".format(**data)
)
msg = ("Missing set shot attributes in DB."
"\nContact your supervisor!."
"\n\nWidth: `{width}`"
"\nHeight: `{height}`"
"\nPixel Aspect: `{pixel_aspect}`").format(**data)
log.error(msg)
nuke.message(msg)
bbox = self._asset_entity.get('data', {}).get('crop')
@ -781,10 +794,10 @@ class WorkfileSettings(object):
)
except Exception as e:
bbox = None
log.error(
"{}: {} \nFormat:Crop need to be set with dots, example: "
"0.0.1920.1080, \nSetting to default".format(__name__, e)
)
msg = ("{}:{} \nFormat:Crop need to be set with dots, example: "
"0.0.1920.1080, \nSetting to default").format(__name__, e)
log.error(msg)
nuke.message(msg)
existing_format = None
for format in nuke.formats():

View file

@ -1,6 +1,6 @@
from pype import api as pype
from pypeapp import Anatomy, config
import nuke
log = pype.Logger().get_logger(__name__, "nuke")
@ -28,7 +28,7 @@ def get_node_dataflow_preset(**kwarg):
families = kwarg.get("families", [])
preset = kwarg.get("preset", None) # omit < 2.0.0v
assert any([host, cls]), log.error(
assert any([host, cls]), nuke.message(
"`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__))
nuke_dataflow = get_dataflow_preset().get(str(host), None)
@ -56,8 +56,10 @@ def get_node_colorspace_preset(**kwarg):
families = kwarg.get("families", [])
preset = kwarg.get("preset", None) # omit < 2.0.0v
assert any([host, cls]), log.error(
"`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__))
if not any([host, cls]):
msg = "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)
log.error(msg)
nuke.message(msg)
nuke_colorspace = get_colorspace_preset().get(str(host), None)
nuke_colorspace_node = nuke_colorspace.get(str(cls), None)

View file

@ -7,7 +7,7 @@ class IntegrateFtrackComments(pyblish.api.InstancePlugin):
"""Create comments in Ftrack."""
order = pyblish.api.IntegratorOrder
label = "Integrate Comments to Ftrack."
label = "Integrate Comments to Ftrack"
families = ["shot"]
enabled = False

View file

@ -23,25 +23,43 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
# Collect session
session = ftrack_api.Session()
self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
context.data["ftrackSession"] = session
# Collect task
project = os.environ.get('AVALON_PROJECT', '')
asset = os.environ.get('AVALON_ASSET', '')
task = os.environ.get('AVALON_TASK', None)
self.log.debug(task)
project_name = os.environ.get('AVALON_PROJECT', '')
asset_name = os.environ.get('AVALON_ASSET', '')
task_name = os.environ.get('AVALON_TASK', None)
# Find project entity
project_query = 'Project where full_name is "{0}"'.format(project_name)
self.log.debug("Project query: < {0} >".format(project_query))
project_entity = session.query(project_query).one()
self.log.debug("Project found: {0}".format(project_entity))
# Find asset entity
entity_query = (
'TypedContext where project_id is "{0}"'
' and name is "{1}"'
).format(project_entity["id"], asset_name)
self.log.debug("Asset entity query: < {0} >".format(entity_query))
asset_entity = session.query(entity_query).one()
self.log.debug("Asset found: {0}".format(asset_entity))
# Find task entity if task is set
if task_name:
task_query = (
'Task where name is "{0}" and parent_id is "{1}"'
).format(task_name, asset_entity["id"])
self.log.debug("Task entity query: < {0} >".format(task_query))
task_entity = session.query(task_query).one()
self.log.debug("Task entity found: {0}".format(task_entity))
if task:
result = session.query('Task where\
project.full_name is "{0}" and\
name is "{1}" and\
parent.name is "{2}"'.format(project, task, asset)).one()
context.data["ftrackTask"] = result
else:
result = session.query('TypedContext where\
project.full_name is "{0}" and\
name is "{1}"'.format(project, asset)).one()
context.data["ftrackEntity"] = result
task_entity = None
self.log.warning("Task name is not set.")
self.log.info(result)
context.data["ftrackProject"] = project_entity
context.data["ftrackEntity"] = asset_entity
context.data["ftrackTask"] = task_entity

View file

@ -77,6 +77,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
info_msg = "Created new {entity_type} with data: {data}"
info_msg += ", metadata: {metadata}."
used_asset_versions = []
# Iterate over components and publish
for data in instance.data.get("ftrackComponentsList", []):
@ -148,6 +149,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
assetversion_cust_attrs = _assetversion_data.pop(
"custom_attributes", {}
)
asset_version_comment = _assetversion_data.pop(
"comment", None
)
assetversion_data.update(_assetversion_data)
assetversion_entity = session.query(
@ -185,6 +189,20 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
existing_assetversion_metadata.update(assetversion_metadata)
assetversion_entity["metadata"] = existing_assetversion_metadata
# Add comment
if asset_version_comment:
assetversion_entity["comment"] = asset_version_comment
try:
session.commit()
except Exception:
session.rollback()
self.log.warning((
"Comment was not possible to set for AssetVersion"
"\"{0}\". Can't set its value to: \"{1}\""
).format(
assetversion_entity["id"], str(asset_version_comment)
))
# Adding Custom Attributes
for attr, val in assetversion_cust_attrs.items():
if attr in assetversion_entity["custom_attributes"]:
@ -369,3 +387,14 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)
if assetversion_entity not in used_asset_versions:
used_asset_versions.append(assetversion_entity)
asset_versions_key = "ftrackIntegratedAssetVersions"
if asset_versions_key not in instance.data:
instance.data[asset_versions_key] = []
for asset_version in used_asset_versions:
if asset_version not in instance.data[asset_versions_key]:
instance.data[asset_versions_key].append(asset_version)

View file

@ -0,0 +1,51 @@
import sys
import pyblish.api
import six
class IntegrateFtrackNote(pyblish.api.InstancePlugin):
"""Create comments in Ftrack."""
# Must be after integrate asset new
order = pyblish.api.IntegratorOrder + 0.4999
label = "Integrate Ftrack note"
families = ["ftrack"]
optional = True
def process(self, instance):
comment = (instance.context.data.get("comment") or "").strip()
if not comment:
self.log.info("Comment is not set.")
return
self.log.debug("Comment is set to {}".format(comment))
asset_versions_key = "ftrackIntegratedAssetVersions"
asset_versions = instance.data.get(asset_versions_key)
if not asset_versions:
self.log.info("There are no integrated AssetVersions")
return
session = instance.context.data["ftrackSession"]
user = session.query(
"User where username is \"{}\"".format(session.api_user)
).first()
if not user:
self.log.warning(
"Was not able to query current User {}".format(
session.api_user
)
)
for asset_version in asset_versions:
asset_version.create_note(comment, author=user)
try:
session.commit()
self.log.debug("Note added to AssetVersion \"{}\"".format(
str(asset_version)
))
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)

View file

@ -11,13 +11,13 @@ class IntegrateCleanComponentData(pyblish.api.InstancePlugin):
label = 'Clean component data'
families = ["ftrack"]
optional = True
active = True
active = False
def process(self, instance):
for comp in instance.data['representations']:
self.log.debug('component {}'.format(comp))
if "%" in comp['published_path'] or "#" in comp['published_path']:
continue

View file

@ -15,4 +15,5 @@ class CollectComment(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder
def process(self, context):
context.data["comment"] = ""
comment = (context.data.get("comment") or "").strip()
context.data["comment"] = comment

View file

@ -101,6 +101,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
lut_path = None
slate_frame = None
families_data = None
baked_mov_path = None
subset = None
version = None
frame_start = 0
@ -427,6 +428,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
"name": ext,
"ext": "{}".format(ext),
"files": list(collection),
"frameStart": start,
"frameEnd": end,
"stagingDir": root,
"anatomy_template": "render",
"fps": fps,

View file

@ -107,9 +107,9 @@ class ExtractBurnin(pype.api.Extractor):
# create copy of prep_data for anatomy formatting
_prep_data = copy.deepcopy(prep_data)
_prep_data["representation"] = repre["name"]
_prep_data["anatomy"] = (
anatomy.format_all(_prep_data).get("solved") or {}
)
filled_anatomy = anatomy.format_all(_prep_data)
_prep_data["anatomy"] = filled_anatomy.get_solved()
burnin_data = {
"input": full_movie_path.replace("\\", "/"),
"codec": repre.get("codec", []),

View file

@ -6,7 +6,7 @@ import pype.api
class ExtractJpegEXR(pyblish.api.InstancePlugin):
"""Resolve any dependency issies
"""Resolve any dependency issues
This plug-in resolves any paths which, if not updated might break
the published file.
@ -55,8 +55,8 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
filename = os.path.splitext(input_file)[0]
if not filename.endswith('.'):
filename += "."
jpegFile = filename + "jpg"
full_output_path = os.path.join(stagingdir, jpegFile)
jpeg_file = filename + "jpg"
full_output_path = os.path.join(stagingdir, jpeg_file)
self.log.info("output {}".format(full_output_path))
@ -87,9 +87,9 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
instance.data["representations"] = []
representation = {
'name': 'jpg',
'name': 'thumbnail',
'ext': 'jpg',
'files': jpegFile,
'files': jpeg_file,
"stagingDir": stagingdir,
"thumbnail": True,
"tags": ['thumbnail']

View file

@ -24,7 +24,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
label = "Integrate Asset"
order = pyblish.api.IntegratorOrder
families = ["assembly"]
families = []
exclude_families = ["clip"]
def process(self, instance):

View file

@ -76,6 +76,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"source",
"matchmove",
"image"
"source",
"assembly"
]
exclude_families = ["clip"]
@ -326,8 +328,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
index_frame_start = None
if repre.get("frameStart"):
frame_start_padding = len(str(
repre.get("frameEnd")))
frame_start_padding = anatomy.templates["render"]["padding"]
index_frame_start = int(repre.get("frameStart"))
# exception for slate workflow
@ -402,6 +403,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("__ dst: {}".format(dst))
representation = {
"_id": io.ObjectId(),
"schema": "pype:representation-2.0",
"type": "representation",
"parent": version_id,
@ -444,6 +446,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("__ represNAME: {}".format(rep['name']))
self.log.debug("__ represPATH: {}".format(rep['published_path']))
io.insert_many(representations)
instance.data["published_representations"] = representations
# self.log.debug("Representation: {}".format(representations))
self.log.info("Registered {} items".format(len(representations)))

View file

@ -0,0 +1,139 @@
import os
import sys
import errno
import shutil
import copy
import six
import pyblish.api
from bson.objectid import ObjectId
from avalon import api, io
class IntegrateThumbnails(pyblish.api.InstancePlugin):
"""Integrate Thumbnails."""
label = "Integrate Thumbnails"
order = pyblish.api.IntegratorOrder + 0.01
families = ["review"]
def process(self, instance):
if not os.environ.get("AVALON_THUMBNAIL_ROOT"):
self.log.info("AVALON_THUMBNAIL_ROOT is not set."
" Skipping thumbnail integration.")
return
published_repres = instance.data.get("published_representations")
if not published_repres:
self.log.debug(
"There are no published representation ids on the instance."
)
return
project_name = api.Session["AVALON_PROJECT"]
anatomy = instance.context.data["anatomy"]
if "publish" not in anatomy.templates:
raise AssertionError("Anatomy does not have set publish key!")
if "thumbnail" not in anatomy.templates["publish"]:
raise AssertionError((
"There is not set \"thumbnail\" template for project \"{}\""
).format(project_name))
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
io.install()
thumb_repre = None
for repre in published_repres:
if repre["name"].lower() == "thumbnail":
thumb_repre = repre
break
if not thumb_repre:
self.log.debug(
"There is no representation with name \"thumbnail\""
)
return
version = io.find_one({"_id": thumb_repre["parent"]})
if not version:
raise AssertionError(
"There does not exist version with id {}".format(
str(thumb_repre["parent"])
)
)
# Get full path to thumbnail file from representation
src_full_path = os.path.normpath(thumb_repre["data"]["path"])
if not os.path.exists(src_full_path):
self.log.warning("Thumbnail file was not found. Path: {}".format(
src_full_path
))
return
filename, file_extension = os.path.splitext(src_full_path)
# Create id for mongo entity now to fill anatomy template
thumbnail_id = ObjectId()
# Prepare anatomy template fill data
template_data = copy.deepcopy(thumb_repre["context"])
template_data.update({
"_id": str(thumbnail_id),
"thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"),
"ext": file_extension,
"thumbnail_type": "thumbnail"
})
anatomy_filled = anatomy.format(template_data)
final_path = anatomy_filled.get("publish", {}).get("thumbnail")
if not final_path:
raise AssertionError((
"Anatomy template was not filled with entered data"
"\nTemplate: {} "
"\nData: {}"
).format(thumbnail_template, str(template_data)))
dst_full_path = os.path.normpath(final_path)
self.log.debug(
"Copying file .. {} -> {}".format(src_full_path, dst_full_path)
)
dirname = os.path.dirname(dst_full_path)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno != errno.EEXIST:
tp, value, tb = sys.exc_info()
six.reraise(tp, value, tb)
shutil.copy(src_full_path, dst_full_path)
# Clean template data from keys that are dynamic
template_data.pop("_id")
template_data.pop("thumbnail_root")
thumbnail_entity = {
"_id": thumbnail_id,
"type": "thumbnail",
"schema": "pype:thumbnail-1.0",
"data": {
"template": thumbnail_template,
"template_data": template_data
}
}
# Create thumbnail entity
io.insert_one(thumbnail_entity)
self.log.debug(
"Creating entity in database {}".format(str(thumbnail_entity))
)
# Set thumbnail id for version
io.update_many(
{"_id": version["_id"]},
{"$set": {"data.thumbnail_id": thumbnail_id}}
)
self.log.debug("Setting thumbnail for version \"{}\" <{}>".format(
version["name"], str(version["_id"])
))

View file

@ -162,6 +162,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"FTRACK_API_KEY",
"FTRACK_SERVER",
"PYPE_ROOT",
"PYPE_METADATA_FILE",
"PYPE_STUDIO_PROJECTS_PATH",
"PYPE_STUDIO_PROJECTS_MOUNT"
]
@ -185,7 +186,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
metadata_path = os.path.normpath(metadata_path)
mount_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_MOUNT'])
network_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_PATH'])
network_root = os.path.normpath(
os.environ['PYPE_STUDIO_PROJECTS_PATH'])
metadata_path = metadata_path.replace(mount_root, network_root)
@ -204,7 +206,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"PluginInfo": {
"Version": "3.6",
"ScriptFile": _get_script(),
"Arguments": '--paths "{}"'.format(metadata_path),
"Arguments": "",
"SingleFrameOnly": "True"
},
@ -216,7 +218,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# job so they use the same environment
environment = job["Props"].get("Env", {})
environment["PYPE_METADATA_FILE"] = metadata_path
i = 0
for index, key in enumerate(environment):
self.log.info("KEY: {}".format(key))
@ -254,6 +256,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"""
# Get a submission job
data = instance.data.copy()
if hasattr(instance, "_log"):
data['_log'] = instance._log
render_job = data.pop("deadlineSubmissionJob", None)
submission_type = "deadline"

View file

@ -140,9 +140,9 @@ class ImportMayaLoader(api.Loader):
message = "Are you sure you want import this"
state = QtWidgets.QMessageBox.warning(None,
"Are you sure?",
message,
buttons=buttons,
defaultButton=accept)
"Are you sure?",
message,
buttons=buttons,
defaultButton=accept)
return state == accept

View file

@ -1,62 +0,0 @@
import pype.maya.plugin
import os
from pypeapp import config
class CameraLoader(pype.maya.plugin.ReferenceLoader):
"""Specific loader of Alembic for the pype.camera family"""
families = ["camera"]
label = "Reference camera"
representations = ["abc", "ma"]
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
# Get family type from the context
try:
family = context["representation"]["context"]["family"]
except ValueError:
family = "camera"
cmds.loadPlugin("AbcImport.mll", quiet=True)
groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
groupReference=True,
groupName="{}:{}".format(namespace, name),
reference=True,
returnNewNodes=True)
cameras = cmds.ls(nodes, type="camera")
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
# Check the Maya version, lockTransform has been introduced since
# Maya 2016.5 Ext 2
version = int(cmds.about(version=True))
if version >= 2016:
for camera in cameras:
cmds.camera(camera, edit=True, lockTransform=True)
else:
self.log.warning("This version of Maya does not support locking of"
" transforms of cameras.")
self[:] = nodes
return nodes
def switch(self, container, representation):
self.update(container, representation)

View file

@ -1,54 +0,0 @@
import pype.maya.plugin
import os
from pypeapp import config
class FBXLoader(pype.maya.plugin.ReferenceLoader):
"""Load the FBX"""
families = ["fbx"]
representations = ["fbx"]
label = "Reference FBX"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
from avalon import maya
try:
family = context["representation"]["context"]["family"]
except ValueError:
family = "fbx"
# Ensure FBX plug-in is loaded
cmds.loadPlugin("fbxmaya", quiet=True)
with maya.maintained_selection():
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName="{}:{}".format(namespace, name))
groupName = "{}:{}".format(namespace, name)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
return nodes
def switch(self, container, representation):
self.update(container, representation)

View file

@ -1,68 +0,0 @@
import pype.maya.plugin
from pypeapp import config
import os
class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
"""Load the model"""
families = ["mayaAscii",
"setdress",
"layout"]
representations = ["ma"]
label = "Reference Maya Ascii"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
from avalon import maya
try:
family = context["representation"]["context"]["family"]
except ValueError:
family = "model"
with maya.maintained_selection():
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName="{}:{}".format(namespace, name))
self[:] = nodes
groupName = "{}:{}".format(namespace, name)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
cmds.setAttr(groupName + ".displayHandle", 1)
# get bounding box
bbox = cmds.exactWorldBoundingBox(groupName)
# get pivot position on world space
pivot = cmds.xform(groupName, q=True, sp=True, ws=True)
# center of bounding box
cx = (bbox[0] + bbox[3]) / 2
cy = (bbox[1] + bbox[4]) / 2
cz = (bbox[2] + bbox[5]) / 2
# add pivot position to calculate offset
cx = cx + pivot[0]
cy = cy + pivot[1]
cz = cz + pivot[2]
# set selection handle offset to center of bounding box
cmds.setAttr(groupName + ".selectHandleX", cx)
cmds.setAttr(groupName + ".selectHandleY", cy)
cmds.setAttr(groupName + ".selectHandleZ", cz)
return nodes
def switch(self, container, representation):
self.update(container, representation)

View file

@ -1,4 +1,6 @@
import pype.maya.plugin
from avalon import api, maya
from maya import cmds
import os
from pypeapp import config
@ -6,8 +8,15 @@ from pypeapp import config
class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
"""Load the model"""
families = ["model", "pointcache", "animation"]
representations = ["ma", "abc"]
families = ["model",
"pointcache",
"animation",
"mayaAscii",
"setdress",
"layout",
"camera",
"rig"]
representations = ["ma", "abc", "fbx"]
tool_names = ["loader"]
label = "Reference"
@ -37,27 +46,29 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
reference=True,
returnNewNodes=True)
namespace = cmds.referenceQuery(nodes[0], namespace=True)
# namespace = cmds.referenceQuery(nodes[0], namespace=True)
shapes = cmds.ls(nodes, shapes=True, long=True)
print(shapes)
newNodes = (list(set(nodes) - set(shapes)))
print(newNodes)
current_namespace = pm.namespaceInfo(currentNamespace=True)
if current_namespace != ":":
groupName = current_namespace + ":" + groupName
groupNode = pm.PyNode(groupName)
roots = set()
print(nodes)
for node in newNodes:
try:
roots.add(pm.PyNode(node).getAllParents()[-2])
except:
except: # noqa: E722
pass
for root in roots:
root.setParent(world=True)
groupNode.root().zeroTransformPivots()
groupNode.zeroTransformPivots()
for root in roots:
root.setParent(groupNode)
@ -90,23 +101,39 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
cmds.setAttr(groupName + ".selectHandleY", cy)
cmds.setAttr(groupName + ".selectHandleZ", cz)
if data.get("post_process", True):
if family == "rig":
self._post_process_rig(name, namespace, context, data)
return newNodes
def switch(self, container, representation):
self.update(container, representation)
def _post_process_rig(self, name, namespace, context, data):
# for backwards compatibility
class AbcLoader(ReferenceLoader):
label = "Deprecated loader (don't use)"
families = ["pointcache", "animation"]
representations = ["abc"]
tool_names = []
output = next((node for node in self if
node.endswith("out_SET")), None)
controls = next((node for node in self if
node.endswith("controls_SET")), None)
assert output, "No out_SET in rig, this is a bug."
assert controls, "No controls_SET in rig, this is a bug."
# for backwards compatibility
class ModelLoader(ReferenceLoader):
label = "Deprecated loader (don't use)"
families = ["model", "pointcache"]
representations = ["abc"]
tool_names = []
# Find the roots amongst the loaded nodes
roots = cmds.ls(self[:], assemblies=True, long=True)
assert roots, "No root nodes in rig, this is a bug."
asset = api.Session["AVALON_ASSET"]
dependency = str(context["representation"]["_id"])
self.log.info("Creating subset: {}".format(namespace))
# Create the animation instance
with maya.maintained_selection():
cmds.select([output, controls] + roots, noExpand=True)
api.create(name=namespace,
asset=asset,
family="animation",
options={"useSelection": True},
data={"dependencies": dependency})

View file

@ -1,95 +0,0 @@
from maya import cmds
import pype.maya.plugin
from avalon import api, maya
import os
from pypeapp import config
class RigLoader(pype.maya.plugin.ReferenceLoader):
"""Specific loader for rigs
This automatically creates an instance for animators upon load.
"""
families = ["rig"]
representations = ["ma"]
label = "Reference rig"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
try:
family = context["representation"]["context"]["family"]
except ValueError:
family = "rig"
groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName=groupName)
cmds.xform(groupName, pivots=(0, 0, 0))
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
shapes = cmds.ls(nodes, shapes=True, long=True)
print(shapes)
newNodes = (list(set(nodes) - set(shapes)))
print(newNodes)
# Store for post-process
self[:] = newNodes
if data.get("post_process", True):
self._post_process(name, namespace, context, data)
return newNodes
def _post_process(self, name, namespace, context, data):
# TODO(marcus): We are hardcoding the name "out_SET" here.
# Better register this keyword, so that it can be used
# elsewhere, such as in the Integrator plug-in,
# without duplication.
output = next((node for node in self if
node.endswith("out_SET")), None)
controls = next((node for node in self if
node.endswith("controls_SET")), None)
assert output, "No out_SET in rig, this is a bug."
assert controls, "No controls_SET in rig, this is a bug."
# Find the roots amongst the loaded nodes
roots = cmds.ls(self[:], assemblies=True, long=True)
assert roots, "No root nodes in rig, this is a bug."
asset = api.Session["AVALON_ASSET"]
dependency = str(context["representation"]["_id"])
# Create the animation instance
with maya.maintained_selection():
cmds.select([output, controls] + roots, noExpand=True)
api.create(name=namespace,
asset=asset,
family="animation",
options={"useSelection": True},
data={"dependencies": dependency})
def switch(self, container, representation):
self.update(container, representation)

View file

@ -117,7 +117,7 @@ class VRayProxyLoader(api.Loader):
vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
name="{}_VRMM".format(name))
name="{}_VRMM".format(name))
vray_mat_sg = cmds.sets(name="{}_VRSG".format(name),
empty=True,
renderable=True,

View file

@ -22,11 +22,11 @@ class ExtractAssembly(pype.api.Extractor):
def process(self, instance):
parent_dir = self.staging_dir(instance)
staging_dir = self.staging_dir(instance)
hierarchy_filename = "{}.abc".format(instance.name)
hierarchy_path = os.path.join(parent_dir, hierarchy_filename)
hierarchy_path = os.path.join(staging_dir, hierarchy_filename)
json_filename = "{}.json".format(instance.name)
json_path = os.path.join(parent_dir, json_filename)
json_path = os.path.join(staging_dir, json_filename)
self.log.info("Dumping scene data for debugging ..")
with open(json_path, "w") as filepath:
@ -46,8 +46,24 @@ class ExtractAssembly(pype.api.Extractor):
"uvWrite": True,
"selection": True})
instance.data["files"] = [json_filename, hierarchy_filename]
if "representations" not in instance.data:
instance.data["representations"] = []
representation_abc = {
'name': 'abc',
'ext': 'abc',
'files': hierarchy_filename,
"stagingDir": staging_dir
}
instance.data["representations"].append(representation_abc)
representation_json = {
'name': 'json',
'ext': 'json',
'files': json_filename,
"stagingDir": staging_dir
}
instance.data["representations"].append(representation_json)
# Remove data
instance.data.pop("scenedata", None)

View file

@ -1,16 +1,14 @@
import os
import glob
import contextlib
import capture_gui
import clique
import capture
#
import pype.maya.lib as lib
import pype.api
#
from maya import cmds, mel
import pymel.core as pm
# import ffmpeg
# # from pype.scripts import otio_burnin
# reload(ffmpeg)
# TODO: move codec settings to presets
@ -93,7 +91,18 @@ class ExtractQuicktime(pype.api.Extractor):
pm.currentTime(refreshFrameInt, edit=True)
with maintained_time():
playblast = capture_gui.lib.capture_scene(preset)
filename = preset.get("filename", "%TEMP%")
# Force viewer to False in call to capture because we have our own
# viewer opening call to allow a signal to trigger between playblast
# and viewer
preset['viewer'] = False
# Remove panel key since it's internal value to capture_gui
preset.pop("panel", None)
path = capture.capture(**preset)
playblast = self._fix_playblast_output_path(path)
self.log.info("file list {}".format(playblast))
@ -119,6 +128,46 @@ class ExtractQuicktime(pype.api.Extractor):
}
instance.data["representations"].append(representation)
def _fix_playblast_output_path(self, filepath):
"""Workaround a bug in maya.cmds.playblast to return correct filepath.
When the `viewer` argument is set to False and maya.cmds.playblast
does not automatically open the playblasted file the returned
filepath does not have the file's extension added correctly.
To workaround this we just glob.glob() for any file extensions and
assume the latest modified file is the correct file and return it.
"""
# Catch cancelled playblast
if filepath is None:
self.log.warning("Playblast did not result in output path. "
"Playblast is probably interrupted.")
return None
# Fix: playblast not returning correct filename (with extension)
# Lets assume the most recently modified file is the correct one.
if not os.path.exists(filepath):
directory = os.path.dirname(filepath)
filename = os.path.basename(filepath)
# check if the filepath is has frame based filename
# example : capture.####.png
parts = filename.split(".")
if len(parts) == 3:
query = os.path.join(directory, "{}.*.{}".format(parts[0],
parts[-1]))
files = glob.glob(query)
else:
files = glob.glob("{}.*".format(filepath))
if not files:
raise RuntimeError("Couldn't find playblast from: "
"{0}".format(filepath))
filepath = max(files, key=os.path.getmtime)
return filepath
@contextlib.contextmanager
def maintained_time():

View file

@ -1,31 +1,14 @@
import os
import contextlib
import time
import sys
import glob
import capture_gui
import clique
import capture
import pype.maya.lib as lib
import pype.api
from maya import cmds
import pymel.core as pm
# import ffmpeg
# reload(ffmpeg)
import avalon.maya
# import maya_utils as mu
# from tweakHUD import master
# from tweakHUD import draft_hud as dHUD
# from tweakHUD import ftrackStrings as fStrings
#
# def soundOffsetFunc(oSF, SF, H):
# tmOff = (oSF - H) - SF
# return tmOff
class ExtractThumbnail(pype.api.Extractor):
@ -47,39 +30,8 @@ class ExtractThumbnail(pype.api.Extractor):
end = cmds.currentTime(query=True)
self.log.info("start: {}, end: {}".format(start, end))
members = instance.data['setMembers']
camera = instance.data['review_camera']
# project_code = ftrack_data['Project']['code']
# task_type = ftrack_data['Task']['type']
#
# # load Preset
# studio_repos = os.path.abspath(os.environ.get('studio_repos'))
# shot_preset_path = os.path.join(studio_repos, 'maya',
# 'capture_gui_presets',
# (project_code + '_' + task_type + '_' + asset + '.json'))
#
# task_preset_path = os.path.join(studio_repos, 'maya',
# 'capture_gui_presets',
# (project_code + '_' + task_type + '.json'))
#
# project_preset_path = os.path.join(studio_repos, 'maya',
# 'capture_gui_presets',
# (project_code + '.json'))
#
# default_preset_path = os.path.join(studio_repos, 'maya',
# 'capture_gui_presets',
# 'default.json')
#
# if os.path.isfile(shot_preset_path):
# preset_to_use = shot_preset_path
# elif os.path.isfile(task_preset_path):
# preset_to_use = task_preset_path
# elif os.path.isfile(project_preset_path):
# preset_to_use = project_preset_path
# else:
# preset_to_use = default_preset_path
capture_preset = ""
capture_preset = instance.context.data['presets']['maya']['capture']
try:
@ -126,7 +78,18 @@ class ExtractThumbnail(pype.api.Extractor):
pm.currentTime(refreshFrameInt, edit=True)
with maintained_time():
playblast = capture_gui.lib.capture_scene(preset)
filename = preset.get("filename", "%TEMP%")
# Force viewer to False in call to capture because we have our own
# viewer opening call to allow a signal to trigger between
# playblast and viewer
preset['viewer'] = False
# Remove panel key since it's internal value to capture_gui
preset.pop("panel", None)
path = capture.capture(**preset)
playblast = self._fix_playblast_output_path(path)
_, thumbnail = os.path.split(playblast)
@ -144,6 +107,45 @@ class ExtractThumbnail(pype.api.Extractor):
}
instance.data["representations"].append(representation)
def _fix_playblast_output_path(self, filepath):
"""Workaround a bug in maya.cmds.playblast to return correct filepath.
When the `viewer` argument is set to False and maya.cmds.playblast
does not automatically open the playblasted file the returned
filepath does not have the file's extension added correctly.
To workaround this we just glob.glob() for any file extensions and
assume the latest modified file is the correct file and return it.
"""
# Catch cancelled playblast
if filepath is None:
self.log.warning("Playblast did not result in output path. "
"Playblast is probably interrupted.")
return None
# Fix: playblast not returning correct filename (with extension)
# Lets assume the most recently modified file is the correct one.
if not os.path.exists(filepath):
directory = os.path.dirname(filepath)
filename = os.path.basename(filepath)
# check if the filepath is has frame based filename
# example : capture.####.png
parts = filename.split(".")
if len(parts) == 3:
query = os.path.join(directory, "{}.*.{}".format(parts[0],
parts[-1]))
files = glob.glob(query)
else:
files = glob.glob("{}.*".format(filepath))
if not files:
raise RuntimeError("Couldn't find playblast from: "
"{0}".format(filepath))
filepath = max(files, key=os.path.getmtime)
return filepath
@contextlib.contextmanager
def maintained_time():

View file

@ -228,80 +228,19 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"AuxFiles": []
}
# Include critical environment variables with submission
# We need those to pass them to pype for it to set correct context
keys = [
# This will trigger `userSetup.py` on the slave
# such that proper initialisation happens the same
# way as it does on a local machine.
# TODO(marcus): This won't work if the slaves don't
# have accesss to these paths, such as if slaves are
# running Linux and the submitter is on Windows.
"PYTHONPATH",
"PATH",
"MTOA_EXTENSIONS_PATH",
"MTOA_EXTENSIONS",
"DYLD_LIBRARY_PATH",
"MAYA_RENDER_DESC_PATH",
"MAYA_MODULE_PATH",
"ARNOLD_PLUGIN_PATH",
"AVALON_SCHEMA",
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"PYBLISHPLUGINPATH",
# todo: This is a temporary fix for yeti variables
"PEREGRINEL_LICENSE",
"SOLIDANGLE_LICENSE",
"ARNOLD_LICENSE"
"MAYA_MODULE_PATH",
"TOOL_ENV"
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"PYPE_USERNAME"
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
# self.log.debug("enviro: {}".format(pprint(environment)))
for path in os.environ:
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]
environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
for key in environment:
clean_path = ""
self.log.debug("key: {}".format(key))
self.log.debug("value: {}".format(environment[key]))
to_process = str(environment[key])
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = to_process
elif "://" in to_process:
clean_path = to_process
elif os.pathsep not in str(to_process):
try:
path = to_process
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
else:
for path in to_process.split(os.pathsep):
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep
except UnicodeDecodeError:
print('path contains non UTF characters')
if key == "PYTHONPATH":
clean_path = clean_path.replace('python2', 'python3')
clean_path = clean_path.replace(
os.path.normpath(
environment['PYPE_STUDIO_CORE_MOUNT']), # noqa
os.path.normpath(
environment['PYPE_STUDIO_CORE_PATH'])) # noqa
clean_environment[key] = clean_path
environment = clean_environment
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
@ -319,7 +258,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
self.preflight_check(instance)
self.log.info("Submitting..")
self.log.info("Submitting ...")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs

View file

@ -35,8 +35,10 @@ class CreateBackdrop(Creator):
return instance
else:
nuke.message("Please select nodes you "
"wish to add to a container")
msg = "Please select nodes you "
"wish to add to a container"
self.log.error(msg)
nuke.message(msg)
return
else:
bckd_node = autoBackdrop()

View file

@ -36,8 +36,10 @@ class CreateGizmo(Creator):
node["tile_color"].setValue(int(self.node_color, 16))
return anlib.imprint(node, self.data)
else:
nuke.message("Please select a group node "
"you wish to publish as the gizmo")
msg = ("Please select a group node "
"you wish to publish as the gizmo")
self.log.error(msg)
nuke.message(msg)
if len(nodes) >= 2:
anlib.select_nodes(nodes)
@ -58,8 +60,10 @@ class CreateGizmo(Creator):
return anlib.imprint(gizmo_node, self.data)
else:
nuke.message("Please select nodes you "
"wish to add to the gizmo")
msg = ("Please select nodes you "
"wish to add to the gizmo")
self.log.error(msg)
nuke.message(msg)
return
else:
with anlib.maintained_selection():

View file

@ -34,7 +34,9 @@ class CrateRead(avalon.nuke.Creator):
nodes = self.nodes
if not nodes or len(nodes) == 0:
nuke.message('Please select Read node')
msg = "Please select Read node"
self.log.error(msg)
nuke.message(msg)
else:
count_reads = 0
for node in nodes:
@ -46,7 +48,9 @@ class CrateRead(avalon.nuke.Creator):
count_reads += 1
if count_reads < 1:
nuke.message('Please select Read node')
msg = "Please select Read node"
self.log.error(msg)
nuke.message(msg)
return
def change_read_node(self, name, node, data):

View file

@ -41,9 +41,11 @@ class CreateWriteRender(plugin.PypeCreator):
if (self.options or {}).get("useSelection"):
nodes = self.nodes
assert len(nodes) < 2, self.log.error(
"Select only one node. The node you want to connect to, "
"or tick off `Use selection`")
if not (len(nodes) < 2):
msg = ("Select only one node. The node you want to connect to, "
"or tick off `Use selection`")
log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
@ -134,7 +136,11 @@ class CreateWritePrerender(plugin.PypeCreator):
if (self.options or {}).get("useSelection"):
nodes = self.nodes
assert len(nodes) < 2, self.log.error("Select only one node. The node you want to connect to, or tick off `Use selection`")
if not (len(nodes) < 2):
msg = ("Select only one node. The node you want to connect to, "
"or tick off `Use selection`")
self.log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]

View file

@ -256,8 +256,11 @@ class LoadBackdropNodes(api.Loader):
if len(viewer) > 0:
viewer = viewer[0]
else:
self.log.error("Please create Viewer node before you "
"run this action again")
if not (len(nodes) < 2):
msg = "Please create Viewer node before you "
"run this action again"
self.log.error(msg)
nuke.message(msg)
return None
# get coordinates of Viewer1

View file

@ -176,8 +176,10 @@ class LoadGizmoInputProcess(api.Loader):
if len(viewer) > 0:
viewer = viewer[0]
else:
self.log.error("Please create Viewer node before you "
"run this action again")
msg = "Please create Viewer node before you "
"run this action again"
self.log.error(msg)
nuke.message(msg)
return None
# get coordinates of Viewer1

View file

@ -276,7 +276,10 @@ class LoadLutsInputProcess(api.Loader):
if len(viewer) > 0:
viewer = viewer[0]
else:
self.log.error("Please create Viewer node before you run this action again")
msg = "Please create Viewer node before you "
"run this action again"
self.log.error(msg)
nuke.message(msg)
return None
# get coordinates of Viewer1

View file

@ -1,4 +1,5 @@
from avalon import api
import nuke
class MatchmoveLoader(api.Loader):
@ -19,6 +20,8 @@ class MatchmoveLoader(api.Loader):
exec(open(self.fname).read())
else:
self.log.error("Unsupported script type")
msg = "Unsupported script type"
self.log.error(msg)
nuke.message(msg)
return True

View file

@ -73,7 +73,7 @@ class LoadSequence(api.Loader):
"""Load image sequence into Nuke"""
families = ["write", "source", "plate", "render"]
representations = ["exr", "dpx", "jpg", "jpeg"]
representations = ["exr", "dpx", "jpg", "jpeg", "png"]
label = "Load sequence"
order = -10

View file

@ -72,8 +72,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
"publish": root.knob('publish').value(),
"family": family,
"families": [family],
"representations": list(),
"subsetGroup": "workfiles"
"representations": list()
})
# adding basic script data

View file

@ -127,8 +127,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"families": families,
"colorspace": node["colorspace"].value(),
"deadlineChunkSize": deadlineChunkSize,
"deadlinePriority": deadlinePriority,
"subsetGroup": "renders"
"deadlinePriority": deadlinePriority
})
self.log.debug("instance.data: {}".format(instance.data))

View file

@ -41,7 +41,7 @@ class ExtractReviewDataLut(pype.api.Extractor):
with anlib.maintained_selection():
exporter = pnlib.ExporterReviewLut(
self, instance
)
)
data = exporter.generate_lut()
# assign to representations

View file

@ -0,0 +1,78 @@
import nuke
import pyblish.api
class RepairWriteResolutionDifference(pyblish.api.Action):
label = "Repair"
icon = "wrench"
on = "failed"
def process(self, context, plugin):
# Get the errored instances
failed = []
for result in context.data["results"]:
if (result["error"] is not None and result["instance"] is not None
and result["instance"] not in failed):
failed.append(result["instance"])
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(failed, plugin)
for instance in instances:
reformat = instance[0].dependencies()[0]
if reformat.Class() != "Reformat":
reformat = nuke.nodes.Reformat(inputs=[instance[0].input(0)])
xpos = instance[0].xpos()
ypos = instance[0].ypos() - 26
dependent_ypos = instance[0].dependencies()[0].ypos()
if (instance[0].ypos() - dependent_ypos) <= 51:
xpos += 110
reformat.setXYpos(xpos, ypos)
instance[0].setInput(0, reformat)
reformat["resize"].setValue("none")
class ValidateOutputResolution(pyblish.api.InstancePlugin):
"""Validates Output Resolution.
It is making sure the resolution of write's input is the same as
Format definition of script in Root node.
"""
order = pyblish.api.ValidatorOrder
optional = True
families = ["render", "render.local", "render.farm"]
label = "Write Resolution"
hosts = ["nuke"]
actions = [RepairWriteResolutionDifference]
def process(self, instance):
# Skip bounding box check if a crop node exists.
if instance[0].dependencies()[0].Class() == "Crop":
return
msg = "Bounding box is outside the format."
assert self.check_resolution(instance), msg
def check_resolution(self, instance):
node = instance[0]
root_width = instance.data["resolutionWidth"]
root_height = instance.data["resolutionHeight"]
write_width = node.format().width()
write_height = node.format().height()
if (root_width != write_width) or (root_height != write_height):
return None
else:
return True

View file

@ -41,7 +41,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
if not repre.get('files'):
msg = ("no frames were collected, "
"you need to render them")
self.log.warning(msg)
self.log.error(msg)
raise ValidationException(msg)
collections, remainder = clique.assemble(repre["files"])
@ -75,7 +75,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
self.log.info(
'len(collection.indexes): {}'.format(collected_frames_len)
)
if "slate" in instance.data["families"]:
collected_frames_len -= 1

View file

@ -57,7 +57,7 @@ class ValidateNukeWriteBoundingBox(pyblish.api.InstancePlugin):
order = pyblish.api.ValidatorOrder
optional = True
families = ["render"]
families = ["render", "render.local", "render.farm"]
label = "Write Bounding Box"
hosts = ["nuke"]
actions = [RepairNukeBoundingBoxAction]

View file

@ -106,8 +106,8 @@ class CollectClips(api.ContextPlugin):
"family": "clip",
"families": [],
"handles": 0,
"handleStart": projectdata.get("handles", 0),
"handleEnd": projectdata.get("handles", 0),
"handleStart": projectdata.get("handleStart", 0),
"handleEnd": projectdata.get("handleEnd", 0),
"version": int(version)})
instance = context.create_instance(**data)

View file

@ -21,7 +21,7 @@ class CollectMatchmovePublish(pyblish.api.InstancePlugin):
label = "Collect Matchmove - SA Publish"
order = pyblish.api.CollectorOrder
family = ["matchmove"]
families = ["matchmove"]
hosts = ["standalonepublisher"]
def process(self, instance):

View file

@ -1,9 +1,12 @@
"""This module is used for command line publishing of image sequences."""
import os
import sys
import argparse
import logging
import subprocess
import platform
try:
from shutil import which
except ImportError:
@ -23,7 +26,6 @@ error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
def __main__():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--paths",
nargs="*",
@ -43,7 +45,11 @@ def __main__():
print("Running pype ...")
auto_pype_root = os.path.dirname(os.path.abspath(__file__))
auto_pype_root = os.path.abspath(auto_pype_root + "../../../../..")
auto_pype_root = os.environ.get('PYPE_ROOT') or auto_pype_root
if os.environ.get('PYPE_ROOT'):
print("Got Pype location from environment: {}".format(
os.environ.get('PYPE_ROOT')))
pype_command = "pype.ps1"
if platform.system().lower() == "linux":
@ -69,7 +75,7 @@ def __main__():
print("Set pype root to: {}".format(pype_root))
print("Paths: {}".format(kwargs.paths or [os.getcwd()]))
paths = kwargs.paths or [os.getcwd()]
paths = kwargs.paths or [os.environ.get("PYPE_METADATA_FILE")] or [os.getcwd()] # noqa
args = [
os.path.join(pype_root, pype_command),

View file

@ -1 +1 @@
__version__ = '1.3.3'
__version__ = '1.8.2'

66
pype/vendor/ftrack_api_old/_weakref.py vendored Normal file
View file

@ -0,0 +1,66 @@
"""
Yet another backport of WeakMethod for Python 2.7.
Changes include removing exception chaining and adding args to super() calls.
Copyright (c) 2001-2019 Python Software Foundation.All rights reserved.
Full license available in LICENSE.python.
"""
from weakref import ref
class WeakMethod(ref):
"""
A custom `weakref.ref` subclass which simulates a weak reference to
a bound method, working around the lifetime problem of bound methods.
"""
__slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__"
def __new__(cls, meth, callback=None):
try:
obj = meth.__self__
func = meth.__func__
except AttributeError:
raise TypeError(
"argument should be a bound method, not {}".format(type(meth))
)
def _cb(arg):
# The self-weakref trick is needed to avoid creating a reference
# cycle.
self = self_wr()
if self._alive:
self._alive = False
if callback is not None:
callback(self)
self = ref.__new__(cls, obj, _cb)
self._func_ref = ref(func, _cb)
self._meth_type = type(meth)
self._alive = True
self_wr = ref(self)
return self
def __call__(self):
obj = super(WeakMethod, self).__call__()
func = self._func_ref()
if obj is None or func is None:
return None
return self._meth_type(func, obj)
def __eq__(self, other):
if isinstance(other, WeakMethod):
if not self._alive or not other._alive:
return self is other
return ref.__eq__(self, other) and self._func_ref == other._func_ref
return NotImplemented
def __ne__(self, other):
if isinstance(other, WeakMethod):
if not self._alive or not other._alive:
return self is not other
return ref.__ne__(self, other) or self._func_ref != other._func_ref
return NotImplemented
__hash__ = ref.__hash__

View file

@ -148,7 +148,8 @@ class Attribute(object):
'''A name and value pair persisted remotely.'''
def __init__(
self, name, default_value=ftrack_api_old.symbol.NOT_SET, mutable=True
self, name, default_value=ftrack_api_old.symbol.NOT_SET, mutable=True,
computed=False
):
'''Initialise attribute with *name*.
@ -161,10 +162,14 @@ class Attribute(object):
are :attr:`ftrack_api_old.symbol.NOT_SET`. The exception to this is when the
target value is also :attr:`ftrack_api_old.symbol.NOT_SET`.
If *computed* is set to True the value is a remote side computed value
and should not be long-term cached.
'''
super(Attribute, self).__init__()
self._name = name
self._mutable = mutable
self._computed = computed
self.default_value = default_value
self._local_key = 'local'
@ -205,6 +210,11 @@ class Attribute(object):
'''Return whether attribute is mutable.'''
return self._mutable
@property
def computed(self):
'''Return whether attribute is computed.'''
return self._computed
def get_value(self, entity):
'''Return current value for *entity*.

View file

@ -49,9 +49,11 @@ class Factory(object):
# Build attributes for class.
attributes = ftrack_api_old.attribute.Attributes()
immutable = schema.get('immutable', [])
immutable_properties = schema.get('immutable', [])
computed_properties = schema.get('computed', [])
for name, fragment in schema.get('properties', {}).items():
mutable = name not in immutable
mutable = name not in immutable_properties
computed = name in computed_properties
default = fragment.get('default', ftrack_api_old.symbol.NOT_SET)
if default == '{uid}':
@ -62,7 +64,8 @@ class Factory(object):
if data_type is not ftrack_api_old.symbol.NOT_SET:
if data_type in (
'string', 'boolean', 'integer', 'number', 'variable'
'string', 'boolean', 'integer', 'number', 'variable',
'object'
):
# Basic scalar attribute.
if data_type == 'number':
@ -74,7 +77,7 @@ class Factory(object):
data_type = 'datetime'
attribute = self.create_scalar_attribute(
class_name, name, mutable, default, data_type
class_name, name, mutable, computed, default, data_type
)
if attribute:
attributes.add(attribute)
@ -139,11 +142,12 @@ class Factory(object):
return cls
def create_scalar_attribute(
self, class_name, name, mutable, default, data_type
self, class_name, name, mutable, computed, default, data_type
):
'''Return appropriate scalar attribute instance.'''
return ftrack_api_old.attribute.ScalarAttribute(
name, data_type=data_type, default_value=default, mutable=mutable
name, data_type=data_type, default_value=default, mutable=mutable,
computed=computed
)
def create_reference_attribute(self, class_name, name, mutable, reference):

View file

@ -526,7 +526,8 @@ class Location(ftrack_api_old.entity.base.Entity):
for index, resource_identifier in enumerate(resource_identifiers):
resource_identifiers[index] = (
self.resource_identifier_transformer.decode(
resource_identifier
resource_identifier,
context={'component': components[index]}
)
)

View file

@ -1,6 +1,8 @@
# :coding: utf-8
# :copyright: Copyright (c) 2015 ftrack
import warnings
import ftrack_api_old.entity.base
@ -33,26 +35,52 @@ class Note(ftrack_api_old.entity.base.Entity):
class CreateNoteMixin(object):
'''Mixin to add create_note method on entity class.'''
def create_note(self, content, author, recipients=None, category=None):
def create_note(
self, content, author, recipients=None, category=None, labels=None
):
'''Create note with *content*, *author*.
Note category can be set by including *category* and *recipients*
can be specified as a list of user or group instances.
NoteLabels can be set by including *labels*.
Note category can be set by including *category*.
*recipients* can be specified as a list of user or group instances.
'''
note_label_support = 'NoteLabel' in self.session.types
if not labels:
labels = []
if labels and not note_label_support:
raise ValueError(
'NoteLabel is not supported by the current server version.'
)
if category and labels:
raise ValueError(
'Both category and labels cannot be set at the same time.'
)
if not recipients:
recipients = []
category_id = None
if category:
category_id = category['id']
data = {
'content': content,
'author': author,
'category_id': category_id
'author': author
}
if category:
if note_label_support:
labels = [category]
warnings.warn(
'category argument will be removed in an upcoming version, '
'please use labels instead.',
PendingDeprecationWarning
)
else:
data['category_id'] = category['id']
note = self.session.create('Note', data)
self['notes'].append(note)
@ -65,4 +93,13 @@ class CreateNoteMixin(object):
note['recipients'].append(recipient)
for label in labels:
self.session.create(
'NoteLabelLink',
{
'label_id': label['id'],
'note_id': note['id']
}
)
return note

View file

@ -3,14 +3,15 @@
from operator import eq, ne, ge, le, gt, lt
from pyparsing import (ParserElement, Group, Word, CaselessKeyword, Forward,
from pyparsing import (Group, Word, CaselessKeyword, Forward,
FollowedBy, Suppress, oneOf, OneOrMore, Optional,
alphanums, quotedString, removeQuotes)
import ftrack_api_old.exception
# Optimise parsing using packrat memoisation feature.
ParserElement.enablePackrat()
# Do not enable packrat since it is not thread-safe and will result in parsing
# exceptions in a multi threaded environment.
# ParserElement.enablePackrat()
class Parser(object):

View file

@ -14,6 +14,7 @@ import operator
import functools
import json
import socket
import warnings
import requests
import requests.exceptions
@ -40,9 +41,20 @@ ServerDetails = collections.namedtuple('ServerDetails', [
])
class EventHub(object):
'''Manage routing of events.'''
_future_signature_warning = (
'When constructing your Session object you did not explicitly define '
'auto_connect_event_hub as True even though you appear to be publishing '
'and / or subscribing to asynchronous events. In version version 2.0 of '
'the ftrack-python-api the default behavior will change from True '
'to False. Please make sure to update your tools. You can read more at '
'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html'
)
def __init__(self, server_url, api_user, api_key):
'''Initialise hub, connecting to ftrack *server_url*.
@ -76,6 +88,8 @@ class EventHub(object):
self._auto_reconnect_attempts = 30
self._auto_reconnect_delay = 10
self._deprecation_warning_auto_connect = False
# Mapping of Socket.IO codes to meaning.
self._code_name_mapping = {
'0': 'disconnect',
@ -134,6 +148,9 @@ class EventHub(object):
connected or connection fails.
'''
self._deprecation_warning_auto_connect = False
if self.connected:
raise ftrack_api_old.exception.EventHubConnectionError(
'Already connected.'
@ -164,17 +181,26 @@ class EventHub(object):
# https://docs.python.org/2/library/socket.html#socket.socket.setblocking
self._connection = websocket.create_connection(url, timeout=60)
except Exception:
except Exception as error:
error_message = (
'Failed to connect to event server at {server_url} with '
'error: "{error}".'
)
error_details = {
'error': unicode(error),
'server_url': self.get_server_url()
}
self.logger.debug(
L(
'Error connecting to event server at {0}.',
self.get_server_url()
error_message, **error_details
),
exc_info=1
)
raise ftrack_api_old.exception.EventHubConnectionError(
'Failed to connect to event server at {0}.'
.format(self.get_server_url())
error_message,
details=error_details
)
# Start background processing thread.
@ -543,6 +569,11 @@ class EventHub(object):
event will be caught by this method and ignored.
'''
if self._deprecation_warning_auto_connect and not synchronous:
warnings.warn(
self._future_signature_warning, FutureWarning
)
try:
return self._publish(
event, synchronous=synchronous, on_reply=on_reply
@ -700,18 +731,23 @@ class EventHub(object):
# Automatically publish a non None response as a reply when not in
# synchronous mode.
if not synchronous and response is not None:
try:
self.publish_reply(
event, data=response, source=subscriber.metadata
if not synchronous:
if self._deprecation_warning_auto_connect:
warnings.warn(
self._future_signature_warning, FutureWarning
)
except Exception:
self.logger.exception(L(
'Error publishing response {0} from subscriber {1} '
'for event {2}.', response, subscriber, event
))
if response is not None:
try:
self.publish_reply(
event, data=response, source=subscriber.metadata
)
except Exception:
self.logger.exception(L(
'Error publishing response {0} from subscriber {1} '
'for event {2}.', response, subscriber, event
))
# Check whether to continue processing topic event.
if event.is_stopped():
@ -881,6 +917,7 @@ class EventHub(object):
if code_name == 'connect':
self.logger.debug('Connected to event server.')
event = ftrack_api_old.event.base.Event('ftrack.meta.connected')
self._prepare_event(event)
self._event_queue.put(event)
elif code_name == 'disconnect':
@ -901,6 +938,7 @@ class EventHub(object):
if not self.connected:
event = ftrack_api_old.event.base.Event('ftrack.meta.disconnected')
self._prepare_event(event)
self._event_queue.put(event)
elif code_name == 'heartbeat':

View file

@ -1,6 +1,23 @@
# :coding: utf-8
# :copyright: Copyright (c) 2016 ftrack
import functools
import warnings
def deprecation_warning(message):
def decorator(function):
@functools.wraps(function)
def wrapper(*args, **kwargs):
warnings.warn(
message,
PendingDeprecationWarning
)
return function(*args, **kwargs)
return wrapper
return decorator
class LazyLogMessage(object):
'''A log message that can be evaluated lazily for improved performance.

View file

@ -16,6 +16,7 @@ import hashlib
import tempfile
import threading
import atexit
import warnings
import requests
import requests.auth
@ -42,8 +43,14 @@ import ftrack_api_old.structure.origin
import ftrack_api_old.structure.entity_id
import ftrack_api_old.accessor.server
import ftrack_api_old._centralized_storage_scenario
import ftrack_api_old.logging
from ftrack_api_old.logging import LazyLogMessage as L
try:
from weakref import WeakMethod
except ImportError:
from ftrack_api_old._weakref import WeakMethod
class SessionAuthentication(requests.auth.AuthBase):
'''Attach ftrack session authentication information to requests.'''
@ -69,7 +76,7 @@ class Session(object):
def __init__(
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
plugin_paths=None, cache=None, cache_key_maker=None,
auto_connect_event_hub=True, schema_cache_path=None,
auto_connect_event_hub=None, schema_cache_path=None,
plugin_arguments=None
):
'''Initialise session.
@ -233,7 +240,8 @@ class Session(object):
self._api_key
)
if auto_connect_event_hub:
self._auto_connect_event_hub_thread = None
if auto_connect_event_hub in (None, True):
# Connect to event hub in background thread so as not to block main
# session usage waiting for event hub connection.
self._auto_connect_event_hub_thread = threading.Thread(
@ -242,8 +250,14 @@ class Session(object):
self._auto_connect_event_hub_thread.daemon = True
self._auto_connect_event_hub_thread.start()
# To help with migration from auto_connect_event_hub default changing
# from True to False.
self._event_hub._deprecation_warning_auto_connect = (
auto_connect_event_hub is None
)
# Register to auto-close session on exit.
atexit.register(self.close)
atexit.register(WeakMethod(self.close))
self._plugin_paths = plugin_paths
if self._plugin_paths is None:
@ -271,6 +285,15 @@ class Session(object):
ftrack_api_old._centralized_storage_scenario.register(self)
self._configure_locations()
self.event_hub.publish(
ftrack_api_old.event.base.Event(
topic='ftrack.api.session.ready',
data=dict(
session=self
)
),
synchronous=True
)
def __enter__(self):
'''Return session as context manager.'''
@ -389,7 +412,8 @@ class Session(object):
try:
self.event_hub.disconnect()
self._auto_connect_event_hub_thread.join()
if self._auto_connect_event_hub_thread:
self._auto_connect_event_hub_thread.join()
except ftrack_api_old.exception.EventHubConnectionError:
pass
@ -428,6 +452,16 @@ class Session(object):
# Re-configure certain session aspects that may be dependant on cache.
self._configure_locations()
self.event_hub.publish(
ftrack_api_old.event.base.Event(
topic='ftrack.api.session.reset',
data=dict(
session=self
)
),
synchronous=True
)
def auto_populating(self, auto_populate):
'''Temporarily set auto populate to *auto_populate*.
@ -508,7 +542,7 @@ class Session(object):
'entity_key': entity.get('id')
})
result = self._call(
result = self.call(
[payload]
)
@ -790,12 +824,13 @@ class Session(object):
}]
# TODO: When should this execute? How to handle background=True?
results = self._call(batch)
results = self.call(batch)
# Merge entities into local cache and return merged entities.
data = []
merged = dict()
for entity in results[0]['data']:
data.append(self.merge(entity))
data.append(self._merge_recursive(entity, merged))
return data, results[0]['metadata']
@ -856,6 +891,48 @@ class Session(object):
else:
return value
def _merge_recursive(self, entity, merged=None):
'''Merge *entity* and all its attributes recursivly.'''
log_debug = self.logger.isEnabledFor(logging.DEBUG)
if merged is None:
merged = {}
attached = self.merge(entity, merged)
for attribute in entity.attributes:
# Remote attributes.
remote_value = attribute.get_remote_value(entity)
if isinstance(
remote_value,
(
ftrack_api_old.entity.base.Entity,
ftrack_api_old.collection.Collection,
ftrack_api_old.collection.MappedCollectionProxy
)
):
log_debug and self.logger.debug(
'Merging remote value for attribute {0}.'.format(attribute)
)
if isinstance(remote_value, ftrack_api_old.entity.base.Entity):
self._merge_recursive(remote_value, merged=merged)
elif isinstance(
remote_value, ftrack_api_old.collection.Collection
):
for entry in remote_value:
self._merge_recursive(entry, merged=merged)
elif isinstance(
remote_value, ftrack_api_old.collection.MappedCollectionProxy
):
for entry in remote_value.collection:
self._merge_recursive(entry, merged=merged)
return attached
def _merge_entity(self, entity, merged=None):
'''Merge *entity* into session returning merged entity.
@ -1185,7 +1262,7 @@ class Session(object):
# Process batch.
if batch:
result = self._call(batch)
result = self.call(batch)
# Clear recorded operations.
self.recorded_operations.clear()
@ -1260,7 +1337,7 @@ class Session(object):
def _fetch_server_information(self):
'''Return server information.'''
result = self._call([{'action': 'query_server_information'}])
result = self.call([{'action': 'query_server_information'}])
return result[0]
def _discover_plugins(self, plugin_arguments=None):
@ -1362,7 +1439,7 @@ class Session(object):
'Loading schemas from server due to hash not matching.'
'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash
))
schemas = self._call([{'action': 'query_schemas'}])[0]
schemas = self.call([{'action': 'query_schemas'}])[0]
if schema_cache_path:
try:
@ -1525,8 +1602,24 @@ class Session(object):
synchronous=True
)
@ftrack_api_old.logging.deprecation_warning(
'Session._call is now available as public method Session.call. The '
'private method will be removed in version 2.0.'
)
def _call(self, data):
'''Make request to server with *data*.'''
'''Make request to server with *data* batch describing the actions.
.. note::
This private method is now available as public method
:meth:`entity_reference`. This alias remains for backwards
compatibility, but will be removed in version 2.0.
'''
return self.call(data)
def call(self, data):
'''Make request to server with *data* batch describing the actions.'''
url = self._server_url + '/api'
headers = {
'content-type': 'application/json',
@ -1553,7 +1646,7 @@ class Session(object):
'Server reported error in unexpected format. Raw error was: {0}'
.format(response.text)
)
self.logger.error(error_message)
self.logger.exception(error_message)
raise ftrack_api_old.exception.ServerError(error_message)
else:
@ -1562,7 +1655,7 @@ class Session(object):
error_message = 'Server reported error: {0}({1})'.format(
result['exception'], result['content']
)
self.logger.error(error_message)
self.logger.exception(error_message)
raise ftrack_api_old.exception.ServerError(error_message)
return result
@ -1620,12 +1713,12 @@ class Session(object):
if "entity_data" in data:
for key, value in data["entity_data"].items():
if isinstance(value, ftrack_api_old.entity.base.Entity):
data["entity_data"][key] = self._entity_reference(value)
data["entity_data"][key] = self.entity_reference(value)
return data
if isinstance(item, ftrack_api_old.entity.base.Entity):
data = self._entity_reference(item)
data = self.entity_reference(item)
with self.auto_populating(True):
@ -1646,14 +1739,15 @@ class Session(object):
value = attribute.get_local_value(item)
elif entity_attribute_strategy == 'persisted_only':
value = attribute.get_remote_value(item)
if not attribute.computed:
value = attribute.get_remote_value(item)
if value is not ftrack_api_old.symbol.NOT_SET:
if isinstance(
attribute, ftrack_api_old.attribute.ReferenceAttribute
):
if isinstance(value, ftrack_api_old.entity.base.Entity):
value = self._entity_reference(value)
value = self.entity_reference(value)
data[attribute.name] = value
@ -1668,14 +1762,14 @@ class Session(object):
if isinstance(item, ftrack_api_old.collection.Collection):
data = []
for entity in item:
data.append(self._entity_reference(entity))
data.append(self.entity_reference(entity))
return data
raise TypeError('{0!r} is not JSON serializable'.format(item))
def _entity_reference(self, entity):
'''Return reference to *entity*.
def entity_reference(self, entity):
'''Return entity reference that uniquely identifies *entity*.
Return a mapping containing the __entity_type__ of the entity along with
the key, value pairs that make up it's primary key.
@ -1689,6 +1783,26 @@ class Session(object):
return reference
@ftrack_api_old.logging.deprecation_warning(
'Session._entity_reference is now available as public method '
'Session.entity_reference. The private method will be removed '
'in version 2.0.'
)
def _entity_reference(self, entity):
'''Return entity reference that uniquely identifies *entity*.
Return a mapping containing the __entity_type__ of the entity along
with the key, value pairs that make up it's primary key.
.. note::
This private method is now available as public method
:meth:`entity_reference`. This alias remains for backwards
compatibility, but will be removed in version 2.0.
'''
return self.entity_reference(entity)
def decode(self, string):
'''Return decoded JSON *string* as Python object.'''
with self.operation_recording(False):
@ -2016,6 +2130,10 @@ class Session(object):
return availabilities
@ftrack_api_old.logging.deprecation_warning(
'Session.delayed_job has been deprecated in favour of session.call. '
'Please refer to the release notes for more information.'
)
def delayed_job(self, job_type):
'''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned.
@ -2033,7 +2151,7 @@ class Session(object):
}
try:
result = self._call(
result = self.call(
[operation]
)[0]
@ -2070,7 +2188,7 @@ class Session(object):
)
try:
result = self._call([operation])
result = self.call([operation])
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
@ -2172,7 +2290,7 @@ class Session(object):
}
try:
result = self._call([operation])
result = self.call([operation])
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
@ -2212,7 +2330,7 @@ class Session(object):
}
try:
result = self._call([operation])
result = self.call([operation])
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
@ -2258,7 +2376,7 @@ class Session(object):
)
try:
self._call(operations)
self.call(operations)
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
@ -2306,7 +2424,7 @@ class Session(object):
)
try:
self._call(operations)
self.call(operations)
except ftrack_api_old.exception.ServerError as error:
# Raise informative error if the action is not supported.
if 'Invalid action u\'send_review_session_invite\'' in error.message:

View file

@ -1,6 +1,8 @@
# :coding: utf-8
# :copyright: Copyright (c) 2014 ftrack
import os
class Symbol(object):
'''A constant symbol.'''
@ -68,8 +70,8 @@ CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b'
#: Identifier of builtin server location.
SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b'
#: Chunk size used when working with data.
CHUNK_SIZE = 8192
#: Chunk size used when working with data, default to 1Mb.
CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024
#: Symbol representing syncing users with ldap
JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP')

View file

@ -0,0 +1,68 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:application-1.0",
"description": "An application definition.",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"label",
"application_dir",
"executable"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string"
},
"label": {
"description": "Nice name of application.",
"type": "string"
},
"application_dir": {
"description": "Name of directory used for application resources.",
"type": "string"
},
"executable": {
"description": "Name of callable executable, this is called to launch the application",
"type": "string"
},
"description": {
"description": "Description of application.",
"type": "string"
},
"environment": {
"description": "Key/value pairs for environment variables related to this application. Supports lists for paths, such as PYTHONPATH.",
"type": "object",
"items": {
"oneOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}}
]
}
},
"default_dirs": {
"type": "array",
"items": {
"type": "string"
}
},
"copy": {
"type": "object",
"patternProperties": {
"^.*$": {
"anyOf": [
{"type": "string"},
{"type": "null"}
]
}
},
"additionalProperties": false
}
}
}

35
schema/asset-1.0.json Normal file
View file

@ -0,0 +1,35 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:asset-1.0",
"description": "A unit of data",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"name",
"subsets"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string"
},
"name": {
"description": "Name of directory",
"type": "string"
},
"subsets": {
"type": "array",
"items": {
"$ref": "subset.json"
}
}
},
"definitions": {}
}

55
schema/asset-2.0.json Normal file
View file

@ -0,0 +1,55 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:asset-2.0",
"description": "A unit of data",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"name",
"silo",
"data"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string",
"enum": ["avalon-core:asset-2.0"],
"example": "avalon-core:asset-2.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["asset"],
"example": "asset"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "592c33475f8c1b064c4d1696"
},
"name": {
"description": "Name of asset",
"type": "string",
"pattern": "^[a-zA-Z0-9_.]*$",
"example": "Bruce"
},
"silo": {
"description": "Group or container of asset",
"type": "string",
"example": "assets"
},
"data": {
"description": "Document metadata",
"type": "object",
"example": {"key": "value"}
}
},
"definitions": {}
}

55
schema/asset-3.0.json Normal file
View file

@ -0,0 +1,55 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:asset-3.0",
"description": "A unit of data",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"name",
"data"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string",
"enum": ["avalon-core:asset-3.0", "pype:asset-3.0"],
"example": "avalon-core:asset-3.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["asset"],
"example": "asset"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "592c33475f8c1b064c4d1696"
},
"name": {
"description": "Name of asset",
"type": "string",
"pattern": "^[a-zA-Z0-9_.]*$",
"example": "Bruce"
},
"silo": {
"description": "Group or container of asset",
"type": "string",
"pattern": "^[a-zA-Z0-9_.]*$",
"example": "assets"
},
"data": {
"description": "Document metadata",
"type": "object",
"example": {"key": "value"}
}
},
"definitions": {}
}

86
schema/config-1.0.json Normal file
View file

@ -0,0 +1,86 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:config-1.0",
"description": "A project configuration.",
"type": "object",
"additionalProperties": false,
"required": [
"template",
"tasks",
"apps"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string"
},
"template": {
"type": "object",
"additionalProperties": false,
"patternProperties": {
"^.*$": {
"type": "string"
}
}
},
"tasks": {
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {"type": "string"},
"icon": {"type": "string"},
"group": {"type": "string"},
"label": {"type": "string"}
},
"required": ["name"]
}
},
"apps": {
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {"type": "string"},
"icon": {"type": "string"},
"group": {"type": "string"},
"label": {"type": "string"}
},
"required": ["name"]
}
},
"families": {
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {"type": "string"},
"icon": {"type": "string"},
"label": {"type": "string"},
"hideFilter": {"type": "boolean"}
},
"required": ["name"]
}
},
"groups": {
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {"type": "string"},
"icon": {"type": "string"},
"color": {"type": "string"},
"order": {"type": ["integer", "number"]}
},
"required": ["name"]
}
},
"copy": {
"type": "object"
}
}
}

100
schema/container-1.0.json Normal file
View file

@ -0,0 +1,100 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:container-1.0",
"description": "A loaded asset",
"type": "object",
"additionalProperties": true,
"required": [
"id",
"objectName",
"name",
"author",
"loader",
"families",
"time",
"subset",
"asset",
"representation",
"version",
"silo",
"path",
"source"
],
"properties": {
"id": {
"description": "Identifier for finding object in host",
"type": "string",
"enum": ["pyblish.mindbender.container"],
"example": "pyblish.mindbender.container"
},
"objectName": {
"description": "Name of internal object, such as the objectSet in Maya.",
"type": "string",
"example": "Bruce_:rigDefault_CON"
},
"name": {
"description": "Full name of application object",
"type": "string",
"example": "modelDefault"
},
"author": {
"description": "Name of the author of the published version",
"type": "string",
"example": "Marcus Ottosson"
},
"loader": {
"description": "Name of loader plug-in used to produce this container",
"type": "string",
"example": "ModelLoader"
},
"families": {
"description": "Families associated with the this subset",
"type": "string",
"example": "mindbender.model"
},
"time": {
"description": "File-system safe, formatted time",
"type": "string",
"example": "20170329T131545Z"
},
"subset": {
"description": "Name of source subset",
"type": "string",
"example": "modelDefault"
},
"asset": {
"description": "Name of source asset",
"type": "string" ,
"example": "Bruce"
},
"representation": {
"description": "Name of source representation",
"type": "string" ,
"example": ".ma"
},
"version": {
"description": "Version number",
"type": "number",
"example": 12
},
"silo": {
"description": "Silo of parent asset",
"type": "string",
"example": "assets"
},
"path": {
"description": "Absolute path on disk",
"type": "string",
"example": "{root}/assets/Bruce/publish/rigDefault/v002"
},
"source": {
"description": "Absolute path to file from which this version was published",
"type": "string",
"example": "{root}/assets/Bruce/work/rigging/maya/scenes/rig_v001.ma"
}
}
}

59
schema/container-2.0.json Normal file
View file

@ -0,0 +1,59 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:container-2.0",
"description": "A loaded asset",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"id",
"objectName",
"name",
"namespace",
"loader",
"representation"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string",
"enum": ["avalon-core:container-2.0", "pype:container-2.0"],
"example": "pype:container-2.0"
},
"id": {
"description": "Identifier for finding object in host",
"type": "string",
"enum": ["pyblish.avalon.container"],
"example": "pyblish.avalon.container"
},
"objectName": {
"description": "Name of internal object, such as the objectSet in Maya.",
"type": "string",
"example": "Bruce_:rigDefault_CON"
},
"loader": {
"description": "Name of loader plug-in used to produce this container",
"type": "string",
"example": "ModelLoader"
},
"name": {
"description": "Internal object name of container in application",
"type": "string",
"example": "modelDefault_01"
},
"namespace": {
"description": "Internal namespace of container in application",
"type": "string",
"example": "Bruce_"
},
"representation": {
"description": "Unique id of representation in database",
"type": "string",
"example": "59523f355f8c1b5f6c5e8348"
}
}
}

10
schema/inventory-1.0.json Normal file
View file

@ -0,0 +1,10 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:config-1.0",
"description": "A project configuration.",
"type": "object",
"additionalProperties": true
}

86
schema/project-2.0.json Normal file
View file

@ -0,0 +1,86 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:project-2.0",
"description": "A unit of data",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"name",
"data",
"config"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string",
"enum": ["avalon-core:project-2.0", "pype:project-2.0"],
"example": "avalon-core:project-2.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["project"],
"example": "project"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "592c33475f8c1b064c4d1696"
},
"name": {
"description": "Name of directory",
"type": "string",
"pattern": "^[a-zA-Z0-9_.]*$",
"example": "hulk"
},
"data": {
"description": "Document metadata",
"type": "object",
"example": {
"fps": 24,
"width": 1920,
"height": 1080
}
},
"config": {
"type": "object",
"description": "Document metadata",
"example": {
"schema": "pype:config-1.0",
"apps": [
{
"name": "maya2016",
"label": "Autodesk Maya 2016"
},
{
"name": "nuke10",
"label": "The Foundry Nuke 10.0"
}
],
"tasks": [
{"name": "model"},
{"name": "render"},
{"name": "animate"},
{"name": "rig"},
{"name": "lookdev"},
{"name": "layout"}
],
"template": {
"work":
"{root}/{project}/{silo}/{asset}/work/{task}/{app}",
"publish":
"{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/{subset}.{representation}"
}
},
"$ref": "config-1.0.json"
}
},
"definitions": {}
}

View file

@ -0,0 +1,28 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:representation-1.0",
"description": "The inverse of an instance",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"format",
"path"
],
"properties": {
"schema": {"type": "string"},
"format": {
"description": "File extension, including '.'",
"type": "string"
},
"path": {
"description": "Unformatted path to version.",
"type": "string"
}
}
}

View file

@ -0,0 +1,78 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:representation-2.0",
"description": "The inverse of an instance",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"parent",
"name",
"data"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string",
"enum": ["avalon-core:representation-2.0", "pype:representation-2.0"],
"example": "pype:representation-2.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["representation"],
"example": "representation"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "592c33475f8c1b064c4d1696"
},
"name": {
"description": "Name of representation",
"type": "string",
"pattern": "^[a-zA-Z0-9_.]*$",
"example": "abc"
},
"data": {
"description": "Document metadata",
"type": "object",
"example": {
"label": "Alembic"
}
},
"dependencies": {
"description": "Other representation that this representation depends on",
"type": "array",
"items": {"type": "string"},
"example": [
"592d547a5f8c1b388093c145"
]
},
"context": {
"description": "Summary of the context to which this representation belong.",
"type": "object",
"properties": {
"project": {"type": "object"},
"asset": {"type": "string"},
"silo": {"type": ["string", "null"]},
"subset": {"type": "string"},
"version": {"type": "number"},
"representation": {"type": "string"}
},
"example": {
"project": "hulk",
"asset": "Bruce",
"silo": "assets",
"subset": "rigDefault",
"version": 12,
"representation": "ma"
}
}
}
}

143
schema/session-1.0.json Normal file
View file

@ -0,0 +1,143 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:session-1.0",
"description": "The Avalon environment",
"type": "object",
"additionalProperties": true,
"required": [
"AVALON_PROJECTS",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_SILO",
"AVALON_CONFIG"
],
"properties": {
"AVALON_PROJECTS": {
"description": "Absolute path to root of project directories",
"type": "string",
"example": "/nas/projects"
},
"AVALON_PROJECT": {
"description": "Name of project",
"type": "string",
"pattern": "^\\w*$",
"example": "Hulk"
},
"AVALON_ASSET": {
"description": "Name of asset",
"type": "string",
"pattern": "^\\w*$",
"example": "Bruce"
},
"AVALON_SILO": {
"description": "Name of asset group or container",
"type": "string",
"pattern": "^\\w*$",
"example": "assets"
},
"AVALON_TASK": {
"description": "Name of task",
"type": "string",
"pattern": "^\\w*$",
"example": "modeling"
},
"AVALON_CONFIG": {
"description": "Name of Avalon configuration",
"type": "string",
"pattern": "^\\w*$",
"example": "polly"
},
"AVALON_APP": {
"description": "Name of application",
"type": "string",
"pattern": "^\\w*$",
"example": "maya2016"
},
"AVALON_MONGO": {
"description": "Address to the asset database",
"type": "string",
"pattern": "^mongodb://[\\w/@:.]*$",
"example": "mongodb://localhost:27017",
"default": "mongodb://localhost:27017"
},
"AVALON_DB": {
"description": "Name of database",
"type": "string",
"pattern": "^\\w*$",
"example": "avalon",
"default": "avalon"
},
"AVALON_LABEL": {
"description": "Nice name of Avalon, used in e.g. graphical user interfaces",
"type": "string",
"example": "Mindbender",
"default": "Avalon"
},
"AVALON_SENTRY": {
"description": "Address to Sentry",
"type": "string",
"pattern": "^http[\\w/@:.]*$",
"example": "https://5b872b280de742919b115bdc8da076a5:8d278266fe764361b8fa6024af004a9c@logs.mindbender.com/2",
"default": null
},
"AVALON_DEADLINE": {
"description": "Address to Deadline",
"type": "string",
"pattern": "^http[\\w/@:.]*$",
"example": "http://192.168.99.101",
"default": null
},
"AVALON_TIMEOUT": {
      "description": "Whenever there is a need for a timeout, this is the default value.",
"type": "string",
"pattern": "^[0-9]*$",
"default": "1000",
"example": "1000"
},
"AVALON_UPLOAD": {
"description": "Boolean of whether to upload published material to central asset repository",
"type": "string",
"default": null,
"example": "True"
},
"AVALON_USERNAME": {
"description": "Generic username",
"type": "string",
"pattern": "^\\w*$",
"default": "avalon",
"example": "myself"
},
"AVALON_PASSWORD": {
"description": "Generic password",
"type": "string",
"pattern": "^\\w*$",
"default": "secret",
"example": "abc123"
},
"AVALON_INSTANCE_ID": {
"description": "Unique identifier for instances in a working file",
"type": "string",
"pattern": "^[\\w.]*$",
"default": "avalon.instance",
"example": "avalon.instance"
},
"AVALON_CONTAINER_ID": {
"description": "Unique identifier for a loaded representation in a working file",
"type": "string",
"pattern": "^[\\w.]*$",
"default": "avalon.container",
"example": "avalon.container"
},
"AVALON_DEBUG": {
"description": "Enable debugging mode. Some applications may use this for e.g. extended verbosity or mock plug-ins.",
"type": "string",
"default": null,
"example": "True"
}
}
}

142
schema/session-2.0.json Normal file
View file

@ -0,0 +1,142 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:session-2.0",
"description": "The Avalon environment",
"type": "object",
"additionalProperties": true,
"required": [
"AVALON_PROJECTS",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_CONFIG"
],
"properties": {
"AVALON_PROJECTS": {
"description": "Absolute path to root of project directories",
"type": "string",
"example": "/nas/projects"
},
"AVALON_PROJECT": {
"description": "Name of project",
"type": "string",
"pattern": "^\\w*$",
"example": "Hulk"
},
"AVALON_ASSET": {
"description": "Name of asset",
"type": "string",
"pattern": "^\\w*$",
"example": "Bruce"
},
"AVALON_SILO": {
"description": "Name of asset group or container",
"type": "string",
"pattern": "^\\w*$",
"example": "assets"
},
"AVALON_TASK": {
"description": "Name of task",
"type": "string",
"pattern": "^\\w*$",
"example": "modeling"
},
"AVALON_CONFIG": {
"description": "Name of Avalon configuration",
"type": "string",
"pattern": "^\\w*$",
"example": "polly"
},
"AVALON_APP": {
"description": "Name of application",
"type": "string",
"pattern": "^\\w*$",
"example": "maya2016"
},
"AVALON_MONGO": {
"description": "Address to the asset database",
"type": "string",
"pattern": "^mongodb://[\\w/@:.]*$",
"example": "mongodb://localhost:27017",
"default": "mongodb://localhost:27017"
},
"AVALON_DB": {
"description": "Name of database",
"type": "string",
"pattern": "^\\w*$",
"example": "avalon",
"default": "avalon"
},
"AVALON_LABEL": {
"description": "Nice name of Avalon, used in e.g. graphical user interfaces",
"type": "string",
"example": "Mindbender",
"default": "Avalon"
},
"AVALON_SENTRY": {
"description": "Address to Sentry",
"type": "string",
"pattern": "^http[\\w/@:.]*$",
"example": "https://5b872b280de742919b115bdc8da076a5:8d278266fe764361b8fa6024af004a9c@logs.mindbender.com/2",
"default": null
},
"AVALON_DEADLINE": {
"description": "Address to Deadline",
"type": "string",
"pattern": "^http[\\w/@:.]*$",
"example": "http://192.168.99.101",
"default": null
},
"AVALON_TIMEOUT": {
      "description": "Whenever there is a need for a timeout, this is the default value.",
"type": "string",
"pattern": "^[0-9]*$",
"default": "1000",
"example": "1000"
},
"AVALON_UPLOAD": {
"description": "Boolean of whether to upload published material to central asset repository",
"type": "string",
"default": null,
"example": "True"
},
"AVALON_USERNAME": {
"description": "Generic username",
"type": "string",
"pattern": "^\\w*$",
"default": "avalon",
"example": "myself"
},
"AVALON_PASSWORD": {
"description": "Generic password",
"type": "string",
"pattern": "^\\w*$",
"default": "secret",
"example": "abc123"
},
"AVALON_INSTANCE_ID": {
"description": "Unique identifier for instances in a working file",
"type": "string",
"pattern": "^[\\w.]*$",
"default": "avalon.instance",
"example": "avalon.instance"
},
"AVALON_CONTAINER_ID": {
"description": "Unique identifier for a loaded representation in a working file",
"type": "string",
"pattern": "^[\\w.]*$",
"default": "avalon.container",
"example": "avalon.container"
},
"AVALON_DEBUG": {
"description": "Enable debugging mode. Some applications may use this for e.g. extended verbosity or mock plug-ins.",
"type": "string",
"default": null,
"example": "True"
}
}
}

32
schema/shaders-1.0.json Normal file
View file

@ -0,0 +1,32 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:shaders-1.0",
"description": "Relationships between shaders and Avalon IDs",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"shader"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string"
},
"shader": {
"description": "Name of directory",
"type": "array",
"items": {
        "type": "string",
"description": "Avalon ID and optional face indexes, e.g. 'f9520572-ac1d-11e6-b39e-3085a99791c9.f[5002:5185]'"
}
}
},
"definitions": {}
}

35
schema/subset-1.0.json Normal file
View file

@ -0,0 +1,35 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:subset-1.0",
"description": "A container of instances",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"name",
"versions"
],
"properties": {
"schema": {
"description": "Schema identifier for payload",
"type": "string"
},
"name": {
"description": "Name of directory",
"type": "string"
},
"versions": {
"type": "array",
"items": {
"$ref": "version.json"
}
}
},
"definitions": {}
}

51
schema/subset-2.0.json Normal file
View file

@ -0,0 +1,51 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:subset-2.0",
"description": "A container of instances",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"parent",
"name",
"data"
],
"properties": {
"schema": {
"description": "The schema associated with this document",
"type": "string",
"enum": ["pype:subset-2.0"],
"example": "pype:subset-2.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["subset"],
"example": "subset"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "592c33475f8c1b064c4d1696"
},
"name": {
"description": "Name of directory",
"type": "string",
"pattern": "^[a-zA-Z0-9_.]*$",
"example": "shot01"
},
"data": {
"type": "object",
"description": "Document metadata",
"example": {
"frameStart": 1000,
"frameEnd": 1201
}
}
}
}

62
schema/subset-3.0.json Normal file
View file

@ -0,0 +1,62 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:subset-3.0",
"description": "A container of instances",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"parent",
"name",
"data"
],
"properties": {
"schema": {
"description": "The schema associated with this document",
"type": "string",
"enum": ["avalon-core:subset-3.0", "pype:subset-3.0"],
"example": "pype:subset-3.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["subset"],
"example": "subset"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "592c33475f8c1b064c4d1696"
},
"name": {
"description": "Name of directory",
"type": "string",
"pattern": "^[a-zA-Z0-9_.]*$",
"example": "shot01"
},
"data": {
"description": "Document metadata",
"type": "object",
"required": ["families"],
"properties": {
"families": {
"type": "array",
"items": {"type": "string"},
"description": "One or more families associated with this subset"
}
},
"example": {
"families" : [
"avalon.camera"
],
"frameStart": 1000,
"frameEnd": 1201
}
}
}
}

42
schema/thumbnail-1.0.json Normal file
View file

@ -0,0 +1,42 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:thumbnail-1.0",
"description": "Entity with thumbnail data",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"data"
],
"properties": {
"schema": {
"description": "The schema associated with this document",
"type": "string",
"enum": ["pype:thumbnail-1.0"],
"example": "pype:thumbnail-1.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["thumbnail"],
"example": "thumbnail"
},
"data": {
"description": "Thumbnail data",
"type": "object",
"example": {
"binary_data": "Binary({byte data of image})",
        "template": "{thumbnail_root}/{project[name]}/{_id}{ext}",
"template_data": {
"ext": ".jpg"
}
}
}
}
}

50
schema/version-1.0.json Normal file
View file

@ -0,0 +1,50 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:version-1.0",
"description": "An individual version",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"version",
"path",
"time",
"author",
"source",
"representations"
],
"properties": {
"schema": {"type": "string"},
"representations": {
"type": "array",
"items": {
"$ref": "representation.json"
}
},
"time": {
"description": "ISO formatted, file-system compatible time",
"type": "string"
},
"author": {
"description": "User logged on to the machine at time of publish",
"type": "string"
},
"version": {
"description": "Number of this version",
"type": "number"
},
"path": {
      "description": "Unformatted path, e.g. '{root}/assets/Bruce/publish/lookdevDefault/v001'",
"type": "string"
},
"source": {
"description": "Original file from which this version was made.",
"type": "string"
}
}
}

92
schema/version-2.0.json Normal file
View file

@ -0,0 +1,92 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:version-2.0",
"description": "An individual version",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"parent",
"name",
"data"
],
"properties": {
"schema": {
"description": "The schema associated with this document",
"type": "string",
"enum": ["pype:version-2.0"],
"example": "pype:version-2.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["version"],
"example": "version"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "592c33475f8c1b064c4d1696"
},
"name": {
"description": "Number of version",
"type": "number",
"example": 12
},
"locations": {
"description": "Where on the planet this version can be found.",
"type": "array",
"items": {"type": "string"},
"example": ["data.avalon.com"]
},
"data": {
"description": "Document metadata",
"type": "object",
"required": ["families", "author", "source", "time"],
"properties": {
"time": {
"description": "ISO formatted, file-system compatible time",
"type": "string"
},
"timeFormat": {
"description": "ISO format of time",
"type": "string"
},
"author": {
"description": "User logged on to the machine at time of publish",
"type": "string"
},
"version": {
"description": "Number of this version",
"type": "number"
},
"path": {
          "description": "Unformatted path, e.g. '{root}/assets/Bruce/publish/lookdevDefault/v001'",
"type": "string"
},
"source": {
"description": "Original file from which this version was made.",
"type": "string"
},
"families": {
"type": "array",
"items": {"type": "string"},
"description": "One or more families associated with this version"
}
},
"example": {
"source" : "{root}/f02_prod/assets/BubbleWitch/work/modeling/marcus/maya/scenes/model_v001.ma",
"author" : "marcus",
"families" : [
"avalon.model"
],
"time" : "20170510T090203Z"
}
}
}
}

84
schema/version-3.0.json Normal file
View file

@ -0,0 +1,84 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:version-3.0",
"description": "An individual version",
"type": "object",
"additionalProperties": true,
"required": [
"schema",
"type",
"parent",
"name",
"data"
],
"properties": {
"schema": {
"description": "The schema associated with this document",
"type": "string",
"enum": ["avalon-core:version-3.0", "pype:version-3.0"],
"example": "pype:version-3.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["version"],
"example": "version"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "592c33475f8c1b064c4d1696"
},
"name": {
"description": "Number of version",
"type": "number",
"example": 12
},
"locations": {
"description": "Where on the planet this version can be found.",
"type": "array",
"items": {"type": "string"},
"example": ["data.avalon.com"]
},
"data": {
"description": "Document metadata",
"type": "object",
"required": ["author", "source", "time"],
"properties": {
"time": {
"description": "ISO formatted, file-system compatible time",
"type": "string"
},
"timeFormat": {
"description": "ISO format of time",
"type": "string"
},
"author": {
"description": "User logged on to the machine at time of publish",
"type": "string"
},
"version": {
"description": "Number of this version",
"type": "number"
},
"path": {
          "description": "Unformatted path, e.g. '{root}/assets/Bruce/publish/lookdevDefault/v001'",
"type": "string"
},
"source": {
"description": "Original file from which this version was made.",
"type": "string"
}
},
"example": {
"source" : "{root}/f02_prod/assets/BubbleWitch/work/modeling/marcus/maya/scenes/model_v001.ma",
"author" : "marcus",
"time" : "20170510T090203Z"
}
}
}
}