Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Merge branch 'develop' into feature/PYPE-433-deadline-publish-with-correct-c

commit 4ac61f1060
758 changed files with 8166 additions and 169380 deletions

LICENSE (2 changes)
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2018 pype club
+Copyright (c) 2018 orbi tools s.r.o
 
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
@@ -14,6 +14,11 @@ class AvalonApps:
         self.parent = parent
         self.app_launcher = None
 
+    def process_modules(self, modules):
+        if "RestApiServer" in modules:
+            from .rest_api import AvalonRestApi
+            self.rest_api_obj = AvalonRestApi()
+
     # Definition of Tray menu
     def tray_menu(self, parent_menu=None):
         # Actions
pype/avalon_apps/rest_api.py (new file, 86 lines)

@@ -0,0 +1,86 @@
+import os
+import re
+import json
+import bson
+import bson.json_util
+from pype.services.rest_api import RestApi, abort, CallbackResult
+from pype.ftrack.lib.custom_db_connector import DbConnector
+
+
+class AvalonRestApi(RestApi):
+    dbcon = DbConnector(
+        os.environ["AVALON_MONGO"],
+        os.environ["AVALON_DB"]
+    )
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.dbcon.install()
+
+    @RestApi.route("/projects/<project_name>", url_prefix="/avalon", methods="GET")
+    def get_project(self, request):
+        project_name = request.url_data["project_name"]
+        if not project_name:
+            output = {}
+            for project_name in self.dbcon.tables():
+                project = self.dbcon[project_name].find_one({"type": "project"})
+                output[project_name] = project
+
+            return CallbackResult(data=self.result_to_json(output))
+
+        project = self.dbcon[project_name].find_one({"type": "project"})
+
+        if project:
+            return CallbackResult(data=self.result_to_json(project))
+
+        abort(404, "Project \"{}\" was not found in database".format(
+            project_name
+        ))
+
+    @RestApi.route("/projects/<project_name>/assets/<asset>", url_prefix="/avalon", methods="GET")
+    def get_assets(self, request):
+        _project_name = request.url_data["project_name"]
+        _asset = request.url_data["asset"]
+
+        if not self.dbcon.exist_table(_project_name):
+            abort(404, "Project \"{}\" was not found in database".format(
+                project_name
+            ))
+
+        if not _asset:
+            assets = self.dbcon[_project_name].find({"type": "asset"})
+            output = self.result_to_json(assets)
+            return CallbackResult(data=output)
+
+        # identificator can be specified with url query (default is `name`)
+        identificator = request.query.get("identificator", "name")
+
+        asset = self.dbcon[_project_name].find_one({
+            "type": "asset",
+            identificator: _asset
+        })
+        if asset:
+            id = asset["_id"]
+            asset["_id"] = str(id)
+            return asset
+
+        abort(404, "Asset \"{}\" with {} was not found in project {}".format(
+            _asset, identificator, project_name
+        ))
+
+    def result_to_json(self, result):
+        """ Converts result of MongoDB query to dict without $oid (ObjectId)
+        keys with help of regex matching.
+
+        ..note:
+            This will convert object type entries similar to ObjectId.
+        """
+        bson_json = bson.json_util.dumps(result)
+        # Replace "{$oid: "{entity id}"}" with "{entity id}"
+        regex1 = '(?P<id>{\"\$oid\": \"[^\"]+\"})'
+        regex2 = '{\"\$oid\": (?P<id>\"[^\"]+\")}'
+        for value in re.findall(regex1, bson_json):
+            for substr in re.findall(regex2, value):
+                bson_json = bson_json.replace(value, substr)
+
+        return json.loads(bson_json)
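For orientation, the two routes above can be exercised with any HTTP client once the pype REST service is running. A minimal sketch using requests; the host and port are assumptions (they depend on the rest_api service presets), and my_project / sh010 are placeholder names:

    import requests

    BASE = "http://localhost:8021/avalon"  # assumed host/port

    # One project document; ObjectId values arrive as plain strings
    # because result_to_json strips the {"$oid": ...} wrappers.
    resp = requests.get("{}/projects/{}".format(BASE, "my_project"))
    resp.raise_for_status()
    print(resp.json())

    # A single asset, looked up by an explicit identifier field.
    resp = requests.get(
        "{}/projects/{}/assets/{}".format(BASE, "my_project", "sh010"),
        params={"identificator": "name"},
    )
    print(resp.status_code)  # 404 when the asset does not exist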
@@ -3,7 +3,7 @@ import sys
 import argparse
 import logging
 import json
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction, MissingPermision
 from pype.clockify import ClockifyAPI
 
@@ -1,2 +1,2 @@
 from .lib import *
-from .ftrack_server import FtrackServer
+from .ftrack_server import FtrackServer, check_ftrack_url
@@ -1,6 +1,6 @@
 import os
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pype.ftrack.lib.io_nonsingleton import DbConnector
 
@@ -2,7 +2,7 @@ import sys
 import argparse
 import logging
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 
 
@@ -3,7 +3,7 @@ import sys
 import argparse
 import logging
 import subprocess
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 
 
@@ -4,8 +4,9 @@ import argparse
 import json
 import arrow
 import logging
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction, get_ca_mongoid
+import ftrack_api
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
 from pypeapp import config
 from ftrack_api.exception import NoResultFoundError
 
@@ -171,7 +172,6 @@ class CustomAttributes(BaseAction):
 
     def avalon_mongo_id_attributes(self, session):
         # Attribute Name and Label
-        cust_attr_name = get_ca_mongoid()
         cust_attr_label = 'Avalon/Mongo Id'
 
         # Types that don't need object_type_id
@@ -207,7 +207,7 @@ class CustomAttributes(BaseAction):
         group = self.get_group('avalon')
 
         data = {}
-        data['key'] = cust_attr_name
+        data['key'] = CustAttrIdKey
         data['label'] = cust_attr_label
         data['type'] = custom_attribute_type
         data['default'] = ''
@@ -4,7 +4,7 @@ import logging
 import argparse
 import re
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from avalon import lib as avalonlib
 from pype.ftrack.lib.io_nonsingleton import DbConnector
@@ -4,7 +4,7 @@ import re
 import argparse
 import logging
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pypeapp import config
 
@@ -142,6 +142,13 @@ class CreateProjectFolders(BaseAction):
         else:
             data['project_id'] = parent['project']['id']
 
+        existing_entity = self.session.query((
+            "TypedContext where name is \"{}\" and "
+            "parent_id is \"{}\" and project_id is \"{}\""
+        ).format(name, data['parent_id'], data['project_id'])).first()
+        if existing_entity:
+            return existing_entity
+
         new_ent = self.session.create(ent_type, data)
         self.session.commit()
         return new_ent
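The added guard turns entity creation into get-or-create: query for a TypedContext with the same name, parent and project first, and only create on a miss, so re-running the action no longer produces duplicates. The same shape as a standalone helper (a sketch; the helper name and the assumption that data carries name, parent_id and project_id are mine):

    def get_or_create(session, ent_type, data):
        # Mirror of the diff's query-before-create guard: reuse an
        # existing entity when one already matches all three keys.
        existing = session.query(
            'TypedContext where name is "{}" and parent_id is "{}" '
            'and project_id is "{}"'.format(
                data['name'], data['parent_id'], data['project_id'])
        ).first()
        if existing:
            return existing
        new_ent = session.create(ent_type, data)
        session.commit()
        return new_ent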
@@ -4,7 +4,7 @@ import json
 import argparse
 import logging
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 
 
@@ -3,7 +3,7 @@ import sys
 import logging
 from bson.objectid import ObjectId
 import argparse
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pype.ftrack.lib.io_nonsingleton import DbConnector
 
@@ -277,10 +277,7 @@ class DeleteAsset(BaseAction):
                 'message': 'No entities to delete in avalon'
             }
 
-        or_subquery = []
-        for id in all_ids:
-            or_subquery.append({'_id': id})
-        delete_query = {'$or': or_subquery}
+        delete_query = {'_id': {'$in': all_ids}}
         self.db.delete_many(delete_query)
 
         return {
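Both delete actions in this commit replace a hand-built $or list with Mongo's $in operator, which expresses the same membership test as a single clause instead of one sub-document per id. A minimal pymongo sketch (connection and collection names are illustrative; the action itself goes through pype's DbConnector):

    from bson.objectid import ObjectId
    from pymongo import MongoClient

    coll = MongoClient()["avalon"]["my_project"]  # illustrative connection

    all_ids = [ObjectId(), ObjectId()]
    # {'$in': [...]} matches any document whose _id is in the list --
    # equivalent to {'$or': [{'_id': i} for i in all_ids]} but one clause.
    result = coll.delete_many({"_id": {"$in": all_ids}})
    print(result.deleted_count)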
@@ -2,7 +2,7 @@ import os
 import sys
 import logging
 import argparse
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pype.ftrack.lib.io_nonsingleton import DbConnector
 
@@ -97,10 +97,7 @@ class AssetsRemover(BaseAction):
                 'message': 'None of assets'
             }
 
-        or_subquery = []
-        for id in all_ids:
-            or_subquery.append({'_id': id})
-        delete_query = {'$or': or_subquery}
+        delete_query = {'_id': {'$in': all_ids}}
         self.db.delete_many(delete_query)
 
         self.db.uninstall()
@@ -4,7 +4,7 @@ import json
 import logging
 import subprocess
 from operator import itemgetter
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pypeapp import Logger, config
 
@@ -36,12 +36,13 @@ class DJVViewAction(BaseAction):
             'file_ext', ["img", "mov", "exr"]
         )
 
-    def register(self):
-        assert (self.djv_path is not None), (
-            'DJV View is not installed'
-            ' or paths in presets are not set correctly'
-        )
-        super().register()
+    def preregister(self):
+        if self.djv_path is None:
+            return (
+                'DJV View is not installed'
+                ' or paths in presets are not set correctly'
+            )
+        return True
 
     def discover(self, session, entities, event):
         """Return available actions based on *event*. """
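The register-to-preregister move replaces a hard assert, which raised and killed registration outright when DJV was missing, with a soft veto: preregister returns True to proceed or an error string explaining why the action should be skipped. A sketch of how a dispatcher can consume such a hook; this is illustrative, not pype's actual BaseAction machinery:

    class Action:
        def preregister(self):
            return True  # subclasses return an error string to veto

        def register(self):
            print("registered", type(self).__name__)

    def register_all(actions):
        for action in actions:
            outcome = action.preregister()
            if outcome is not True:
                # A string result means "skip me", so one missing DCC
                # no longer aborts registration of every other action.
                print("skipped {}: {}".format(type(action).__name__, outcome))
                continue
            action.register()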
@@ -4,7 +4,7 @@ import argparse
 import logging
 import json
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 
 
@@ -108,6 +108,7 @@ class JobKiller(BaseAction):
                 'Changing Job ({}) status: {} -> failed'
             ).format(job['id'], origin_status))
         except Exception:
+            session.rollback()
             self.log.warning((
                 'Changing Job ({}) has failed'
             ).format(job['id']))
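The one added line matters because an ftrack_api session queues operations until commit; after a failed commit the queue still holds the bad payload, so without a rollback every later commit would replay it and fail again. The pattern in isolation (names as in the diff):

    try:
        job["status"] = "failed"
        session.commit()
    except Exception:
        session.rollback()  # discard pending operations from this attempt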
@@ -2,7 +2,7 @@ import os
 import sys
 import argparse
 import logging
-from pype.vendor import ftrack_api
+import ftrack_api
 
 from pype.ftrack import BaseAction
 
@@ -2,12 +2,9 @@ import os
 import json
 
 from ruamel import yaml
-from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction
 from pypeapp import config
-from pype.ftrack.lib import get_avalon_attr
-
-from pype.vendor.ftrack_api import session as fa_session
+from pype.ftrack.lib.avalon_sync import get_avalon_attr
 
 
 class PrepareProject(BaseAction):
@@ -55,6 +52,8 @@ class PrepareProject(BaseAction):
         attributes_to_set = {}
         for attr in hier_cust_attrs:
             key = attr["key"]
+            if key.startswith("avalon_"):
+                continue
             attributes_to_set[key] = {
                 "label": attr["label"],
                 "object": attr,
@@ -65,6 +64,8 @@ class PrepareProject(BaseAction):
             if attr["entity_type"].lower() != "show":
                 continue
             key = attr["key"]
+            if key.startswith("avalon_"):
+                continue
             attributes_to_set[key] = {
                 "label": attr["label"],
                 "object": attr,
@@ -7,7 +7,7 @@ import json
 
 from pypeapp import Logger, config
 from pype.ftrack import BaseAction
-from pype.vendor import ftrack_api
+import ftrack_api
 from avalon import io, api
 
 log = Logger().get_logger(__name__)
 
@@ -61,12 +61,12 @@ class RVAction(BaseAction):
     def set_rv_path(self):
         self.rv_path = self.config_data.get("rv_path")
 
-    def register(self):
-        assert (self.rv_path is not None), (
-            'RV is not installed'
-            ' or paths in presets are not set correctly'
-        )
-        super().register()
+    def preregister(self):
+        if self.rv_path is None:
+            return (
+                'RV is not installed or paths in presets are not set correctly'
+            )
+        return True
 
     def get_components_from_entity(self, session, entity, components):
         """Get components from various entity types.
pype/ftrack/actions/action_seed.py (new file, 347 lines)

@@ -0,0 +1,347 @@
+import os
+from operator import itemgetter
+from pype.ftrack import BaseAction
+
+
+class SeedDebugProject(BaseAction):
+    '''Edit meta data action.'''
+
+    #: Action identifier.
+    identifier = "seed.debug.project"
+    #: Action label.
+    label = "SeedDebugProject"
+    #: Action description.
+    description = "Description"
+    #: priority
+    priority = 100
+    #: roles that are allowed to register this action
+    role_list = ["Pypeclub"]
+    icon = "{}/ftrack/action_icons/SeedProject.svg".format(
+        os.environ.get("PYPE_STATICS_SERVER", "")
+    )
+
+    # Asset names which will be created in `Assets` entity
+    assets = [
+        "Addax", "Alpaca", "Ant", "Antelope", "Aye", "Badger", "Bear", "Bee",
+        "Beetle", "Bluebird", "Bongo", "Bontebok", "Butterflie", "Caiman",
+        "Capuchin", "Capybara", "Cat", "Caterpillar", "Coyote", "Crocodile",
+        "Cuckoo", "Deer", "Dragonfly", "Duck", "Eagle", "Egret", "Elephant",
+        "Falcon", "Fossa", "Fox", "Gazelle", "Gecko", "Gerbil",
+        "GiantArmadillo", "Gibbon", "Giraffe", "Goose", "Gorilla",
+        "Grasshoper", "Hare", "Hawk", "Hedgehog", "Heron", "Hog",
+        "Hummingbird", "Hyena", "Chameleon", "Cheetah", "Iguana", "Jackal",
+        "Jaguar", "Kingfisher", "Kinglet", "Kite", "Komodo", "Lemur",
+        "Leopard", "Lion", "Lizard", "Macaw", "Malachite", "Mandrill",
+        "Mantis", "Marmoset", "Meadowlark", "Meerkat", "Mockingbird",
+        "Mongoose", "Monkey", "Nyal", "Ocelot", "Okapi", "Oribi", "Oriole",
+        "Otter", "Owl", "Panda", "Parrot", "Pelican", "Pig", "Porcupine",
+        "Reedbuck", "Rhinocero", "Sandpiper", "Servil", "Skink", "Sloth",
+        "Snake", "Spider", "Squirrel", "Sunbird", "Swallow", "Swift", "Tiger",
+        "Sylph", "Tanager", "Vulture", "Warthog", "Waterbuck", "Woodpecker",
+        "Zebra"
+    ]
+
+    # Tasks which will be created for Assets
+    asset_tasks = [
+        "Modeling", "Lookdev", "Rigging"
+    ]
+    # Tasks which will be created for Shots
+    shot_tasks = [
+        "Animation", "Lighting", "Compositing", "FX"
+    ]
+
+    # Define how much sequences will be created
+    default_seq_count = 5
+    # Define how much shots will be created for each sequence
+    default_shots_count = 10
+
+    existing_projects = None
+    new_project_item = "< New Project >"
+    current_project_item = "< Current Project >"
+
+    def discover(self, session, entities, event):
+        ''' Validation '''
+        return True
+
+    def interface(self, session, entities, event):
+        if event["data"].get("values", {}):
+            return
+
+        title = "Select Project where you want to create seed data"
+
+        items = []
+        item_splitter = {"type": "label", "value": "---"}
+
+        description_label = {
+            "type": "label",
+            "value": (
+                "WARNING: Action does NOT check if entities already exist !!!"
+            )
+        }
+        items.append(description_label)
+
+        all_projects = session.query("select full_name from Project").all()
+        self.existing_projects = [proj["full_name"] for proj in all_projects]
+        projects_items = [
+            {"label": proj, "value": proj} for proj in self.existing_projects
+        ]
+
+        data_items = []
+
+        data_items.append({
+            "label": self.new_project_item,
+            "value": self.new_project_item
+        })
+
+        data_items.append({
+            "label": self.current_project_item,
+            "value": self.current_project_item
+        })
+
+        data_items.extend(sorted(
+            projects_items,
+            key=itemgetter("label"),
+            reverse=False
+        ))
+        projects_item = {
+            "label": "Choose Project",
+            "type": "enumerator",
+            "name": "project_name",
+            "data": data_items,
+            "value": self.current_project_item
+        }
+        items.append(projects_item)
+        items.append(item_splitter)
+
+        items.append({
+            "label": "Number of assets",
+            "type": "number",
+            "name": "asset_count",
+            "value": len(self.assets)
+        })
+        items.append({
+            "label": "Number of sequences",
+            "type": "number",
+            "name": "seq_count",
+            "value": self.default_seq_count
+        })
+        items.append({
+            "label": "Number of shots",
+            "type": "number",
+            "name": "shots_count",
+            "value": self.default_shots_count
+        })
+        items.append(item_splitter)
+
+        note_label = {
+            "type": "label",
+            "value": (
+                "<p><i>NOTE: Enter project name and choose schema if you "
+                "chose `\"< New Project >\"`(code is optional)</i><p>"
+            )
+        }
+        items.append(note_label)
+        items.append({
+            "label": "Project name",
+            "name": "new_project_name",
+            "type": "text",
+            "value": ""
+        })
+
+        project_schemas = [
+            sch["name"] for sch in self.session.query("ProjectSchema").all()
+        ]
+        schemas_item = {
+            "label": "Choose Schema",
+            "type": "enumerator",
+            "name": "new_schema_name",
+            "data": [
+                {"label": sch, "value": sch} for sch in project_schemas
+            ],
+            "value": project_schemas[0]
+        }
+        items.append(schemas_item)
+
+        items.append({
+            "label": "*Project code",
+            "name": "new_project_code",
+            "type": "text",
+            "value": "",
+            "empty_text": "Optional..."
+        })
+
+        return {
+            "items": items,
+            "title": title
+        }
+
+    def launch(self, session, in_entities, event):
+        if "values" not in event["data"]:
+            return
+
+        # THIS IS THE PROJECT PART
+        values = event["data"]["values"]
+        selected_project = values["project_name"]
+        if selected_project == self.new_project_item:
+            project_name = values["new_project_name"]
+            if project_name in self.existing_projects:
+                msg = "Project \"{}\" already exist".format(project_name)
+                self.log.error(msg)
+                return {"success": False, "message": msg}
+
+            project_code = values["new_project_code"]
+            project_schema_name = values["new_schema_name"]
+            if not project_code:
+                project_code = project_name
+            project_code = project_code.lower().replace(" ", "_").strip()
+            _project = session.query(
+                "Project where name is \"{}\"".format(project_code)
+            ).first()
+            if _project:
+                msg = "Project with code \"{}\" already exist".format(
+                    project_code
+                )
+                self.log.error(msg)
+                return {"success": False, "message": msg}
+
+            project_schema = session.query(
+                "ProjectSchema where name is \"{}\"".format(
+                    project_schema_name
+                )
+            ).one()
+            # Create the project with the chosen schema.
+            self.log.debug((
+                "*** Creating Project: name <{}>, code <{}>, schema <{}>"
+            ).format(project_name, project_code, project_schema_name))
+            project = session.create("Project", {
+                "name": project_code,
+                "full_name": project_name,
+                "project_schema": project_schema
+            })
+            session.commit()
+
+        elif selected_project == self.current_project_item:
+            entity = in_entities[0]
+            if entity.entity_type.lower() == "project":
+                project = entity
+            else:
+                if "project" in entity:
+                    project = entity["project"]
+                else:
+                    project = entity["parent"]["project"]
+            project_schema = project["project_schema"]
+            self.log.debug((
+                "*** Using Project: name <{}>, code <{}>, schema <{}>"
+            ).format(
+                project["full_name"], project["name"], project_schema["name"]
+            ))
+        else:
+            project = session.query("Project where full_name is \"{}\"".format(
+                selected_project
+            )).one()
+            project_schema = project["project_schema"]
+            self.log.debug((
+                "*** Using Project: name <{}>, code <{}>, schema <{}>"
+            ).format(
+                project["full_name"], project["name"], project_schema["name"]
+            ))
+
+        # THIS IS THE MAGIC PART
+        task_types = {}
+        for _type in project_schema["_task_type_schema"]["types"]:
+            if _type["name"] not in task_types:
+                task_types[_type["name"]] = _type
+        self.task_types = task_types
+
+        asset_count = values.get("asset_count") or len(self.assets)
+        seq_count = values.get("seq_count") or self.default_seq_count
+        shots_count = values.get("shots_count") or self.default_shots_count
+
+        self.create_assets(project, asset_count)
+        self.create_shots(project, seq_count, shots_count)
+
+        return True
+
+    def create_assets(self, project, asset_count):
+        self.log.debug("*** Creating assets:")
+
+        main_entity = self.session.create("Folder", {
+            "name": "Assets",
+            "parent": project
+        })
+        self.log.debug("- Assets")
+        available_assets = len(self.assets)
+        repetitive_times = (
+            int(asset_count / available_assets) +
+            (asset_count % available_assets > 0)
+        )
+        created_assets = 0
+        for _asset_name in self.assets:
+            if created_assets >= asset_count:
+                break
+            for asset_num in range(1, repetitive_times + 1):
+                if created_assets >= asset_count:
+                    break
+                asset_name = "%s_%02d" % (_asset_name, asset_num)
+                asset = self.session.create("AssetBuild", {
+                    "name": asset_name,
+                    "parent": main_entity
+                })
+                created_assets += 1
+                self.log.debug("- Assets/{}".format(asset_name))
+
+                for task_name in self.asset_tasks:
+                    self.session.create("Task", {
+                        "name": task_name,
+                        "parent": asset,
+                        "type": self.task_types[task_name]
+                    })
+                    self.log.debug("- Assets/{}/{}".format(
+                        asset_name, task_name
+                    ))
+
+        self.log.debug("*** Commiting Assets")
+        self.session.commit()
+
+    def create_shots(self, project, seq_count, shots_count):
+        self.log.debug("*** Creating shots:")
+        main_entity = self.session.create("Folder", {
+            "name": "Shots",
+            "parent": project
+        })
+        self.log.debug("- Shots")
+
+        for seq_num in range(1, seq_count+1):
+            seq_name = "sq%03d" % seq_num
+            seq = self.session.create("Sequence", {
+                "name": seq_name,
+                "parent": main_entity
+            })
+            self.log.debug("- Shots/{}".format(seq_name))
+
+            for shot_num in range(1, shots_count+1):
+                shot_name = "%ssh%04d" % (seq_name, (shot_num*10))
+                shot = self.session.create("Shot", {
+                    "name": shot_name,
+                    "parent": seq
+                })
+                self.log.debug("- Shots/{}/{}".format(seq_name, shot_name))
+
+                for task_name in self.shot_tasks:
+                    self.session.create("Task", {
+                        "name": task_name,
+                        "parent": shot,
+                        "type": self.task_types[task_name]
+                    })
+                    self.log.debug("- Shots/{}/{}/{}".format(
+                        seq_name, shot_name, task_name
+                    ))
+
+        self.log.debug("*** Commiting Shots")
+        self.session.commit()
+
+
+def register(session, plugins_presets={}):
+    '''Register plugin. Called when used as an plugin.'''
+
+    SeedDebugProject(session, plugins_presets).register()
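The repetitive_times expression in create_assets is integer ceiling division in disguise: the boolean remainder check adds one to the quotient whenever the count does not divide evenly, because bool is an int in Python. Equivalent forms, for clarity (a sketch, not part of the commit):

    import math

    asset_count, available_assets = 103, 100
    # The commit's form: True counts as 1.
    a = int(asset_count / available_assets) + (asset_count % available_assets > 0)
    # Common idioms computing the same ceiling:
    b = -(-asset_count // available_assets)
    c = math.ceil(asset_count / available_assets)
    assert a == b == c == 2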
@@ -1,4 +1,4 @@
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 
 
@@ -1,351 +0,0 @@
-import os
-import sys
-import json
-import argparse
-import logging
-import collections
-
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction, lib
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-from bson.objectid import ObjectId
-
-
-class SyncHierarchicalAttrs(BaseAction):
-
-    db_con = DbConnector()
-    ca_mongoid = lib.get_ca_mongoid()
-
-    #: Action identifier.
-    identifier = 'sync.hierarchical.attrs.local'
-    #: Action label.
-    label = "Pype Admin"
-    variant = '- Sync Hier Attrs (Local)'
-    #: Action description.
-    description = 'Synchronize hierarchical attributes'
-    #: Icon
-    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
-        os.environ.get('PYPE_STATICS_SERVER', '')
-    )
-
-    #: roles that are allowed to register this action
-    role_list = ['Pypeclub', 'Administrator', 'Project Manager']
-
-    def discover(self, session, entities, event):
-        ''' Validation '''
-        for entity in entities:
-            if (
-                entity.get('context_type', '').lower() in ('show', 'task') and
-                entity.entity_type.lower() != 'task'
-            ):
-                return True
-        return False
-
-    def launch(self, session, entities, event):
-        self.interface_messages = {}
-        user = session.query(
-            'User where id is "{}"'.format(event['source']['user']['id'])
-        ).one()
-
-        job = session.create('Job', {
-            'user': user,
-            'status': 'running',
-            'data': json.dumps({
-                'description': 'Sync Hierachical attributes'
-            })
-        })
-        session.commit()
-        self.log.debug('Job with id "{}" created'.format(job['id']))
-
-        process_session = ftrack_api.Session(
-            server_url=session.server_url,
-            api_key=session.api_key,
-            api_user=session.api_user,
-            auto_connect_event_hub=True
-        )
-
-        try:
-            # Collect hierarchical attrs
-            self.log.debug('Collecting Hierarchical custom attributes started')
-            custom_attributes = {}
-            all_avalon_attr = process_session.query(
-                'CustomAttributeGroup where name is "avalon"'
-            ).one()
-
-            error_key = (
-                'Hierarchical attributes with set "default" value (not allowed)'
-            )
-
-            for cust_attr in all_avalon_attr['custom_attribute_configurations']:
-                if 'avalon_' in cust_attr['key']:
-                    continue
-
-                if not cust_attr['is_hierarchical']:
-                    continue
-
-                if cust_attr['default']:
-                    if error_key not in self.interface_messages:
-                        self.interface_messages[error_key] = []
-                    self.interface_messages[error_key].append(
-                        cust_attr['label']
-                    )
-
-                    self.log.warning((
-                        'Custom attribute "{}" has set default value.'
-                        ' This attribute can\'t be synchronized'
-                    ).format(cust_attr['label']))
-                    continue
-
-                custom_attributes[cust_attr['key']] = cust_attr
-
-            self.log.debug(
-                'Collecting Hierarchical custom attributes has finished'
-            )
-
-            if not custom_attributes:
-                msg = 'No hierarchical attributes to sync.'
-                self.log.debug(msg)
-                return {
-                    'success': True,
-                    'message': msg
-                }
-
-            entity = entities[0]
-            if entity.entity_type.lower() == 'project':
-                project_name = entity['full_name']
-            else:
-                project_name = entity['project']['full_name']
-
-            self.db_con.install()
-            self.db_con.Session['AVALON_PROJECT'] = project_name
-
-            _entities = self._get_entities(event, process_session)
-
-            for entity in _entities:
-                self.log.debug(30*'-')
-                self.log.debug(
-                    'Processing entity "{}"'.format(entity.get('name', entity))
-                )
-
-                ent_name = entity.get('name', entity)
-                if entity.entity_type.lower() == 'project':
-                    ent_name = entity['full_name']
-
-                for key in custom_attributes:
-                    self.log.debug(30*'*')
-                    self.log.debug(
-                        'Processing Custom attribute key "{}"'.format(key)
-                    )
-                    # check if entity has that attribute
-                    if key not in entity['custom_attributes']:
-                        error_key = 'Missing key on entities'
-                        if error_key not in self.interface_messages:
-                            self.interface_messages[error_key] = []
-
-                        self.interface_messages[error_key].append(
-                            '- key: "{}" - entity: "{}"'.format(key, ent_name)
-                        )
-
-                        self.log.error((
-                            '- key "{}" not found on "{}"'
-                        ).format(key, ent_name))
-                        continue
-
-                    value = self.get_hierarchical_value(key, entity)
-                    if value is None:
-                        error_key = (
-                            'Missing value for key on entity'
-                            ' and its parents (synchronization was skipped)'
-                        )
-                        if error_key not in self.interface_messages:
-                            self.interface_messages[error_key] = []
-
-                        self.interface_messages[error_key].append(
-                            '- key: "{}" - entity: "{}"'.format(key, ent_name)
-                        )
-
-                        self.log.warning((
-                            '- key "{}" not set on "{}" or its parents'
-                        ).format(key, ent_name))
-                        continue
-
-                    self.update_hierarchical_attribute(entity, key, value)
-
-            job['status'] = 'done'
-            session.commit()
-
-        except Exception:
-            self.log.error(
-                'Action "{}" failed'.format(self.label),
-                exc_info=True
-            )
-
-        finally:
-            self.db_con.uninstall()
-
-            if job['status'] in ('queued', 'running'):
-                job['status'] = 'failed'
-                session.commit()
-            if self.interface_messages:
-                title = "Errors during SyncHierarchicalAttrs"
-                self.show_interface_from_dict(
-                    messages=self.interface_messages, title=title, event=event
-                )
-
-        return True
-
-    def get_hierarchical_value(self, key, entity):
-        value = entity['custom_attributes'][key]
-        if (
-            value is not None or
-            entity.entity_type.lower() == 'project'
-        ):
-            return value
-
-        return self.get_hierarchical_value(key, entity['parent'])
-
-    def update_hierarchical_attribute(self, entity, key, value):
-        if (
-            entity['context_type'].lower() not in ('show', 'task') or
-            entity.entity_type.lower() == 'task'
-        ):
-            return
-
-        ent_name = entity.get('name', entity)
-        if entity.entity_type.lower() == 'project':
-            ent_name = entity['full_name']
-
-        hierarchy = '/'.join(
-            [a['name'] for a in entity.get('ancestors', [])]
-        )
-        if hierarchy:
-            hierarchy = '/'.join(
-                [entity['project']['full_name'], hierarchy, entity['name']]
-            )
-        elif entity.entity_type.lower() == 'project':
-            hierarchy = entity['full_name']
-        else:
-            hierarchy = '/'.join(
-                [entity['project']['full_name'], entity['name']]
-            )
-
-        self.log.debug('- updating entity "{}"'.format(hierarchy))
-
-        # collect entity's custom attributes
-        custom_attributes = entity.get('custom_attributes')
-        if not custom_attributes:
-            return
-
-        mongoid = custom_attributes.get(self.ca_mongoid)
-        if not mongoid:
-            error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
-            if error_key not in self.interface_messages:
-                self.interface_messages[error_key] = []
-
-            if ent_name not in self.interface_messages[error_key]:
-                self.interface_messages[error_key].append(ent_name)
-
-            self.log.warning(
-                '-- entity "{}" is not synchronized to avalon. Skipping'.format(
-                    ent_name
-                )
-            )
-            return
-
-        try:
-            mongoid = ObjectId(mongoid)
-        except Exception:
-            error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
-            if error_key not in self.interface_messages:
-                self.interface_messages[error_key] = []
-
-            if ent_name not in self.interface_messages[error_key]:
-                self.interface_messages[error_key].append(ent_name)
-
-            self.log.warning(
-                '-- entity "{}" has stored invalid MongoID. Skipping'.format(
-                    ent_name
-                )
-            )
-            return
-        # Find entity in Mongo DB
-        mongo_entity = self.db_con.find_one({'_id': mongoid})
-        if not mongo_entity:
-            error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
-            if error_key not in self.interface_messages:
-                self.interface_messages[error_key] = []
-
-            if ent_name not in self.interface_messages[error_key]:
-                self.interface_messages[error_key].append(ent_name)
-
-            self.log.warning(
-                '-- entity "{}" was not found in DB by id "{}". Skipping'.format(
-                    ent_name, str(mongoid)
-                )
-            )
-            return
-
-        # Change value if entity has set it's own
-        entity_value = custom_attributes[key]
-        if entity_value is not None:
-            value = entity_value
-
-        data = mongo_entity.get('data') or {}
-
-        data[key] = value
-        self.db_con.update_many(
-            {'_id': mongoid},
-            {'$set': {'data': data}}
-        )
-
-        self.log.debug(
-            '-- stored value "{}"'.format(value)
-        )
-
-        for child in entity.get('children', []):
-            self.update_hierarchical_attribute(child, key, value)
-
-
-def register(session, plugins_presets={}):
-    '''Register plugin. Called when used as an plugin.'''
-
-    SyncHierarchicalAttrs(session, plugins_presets).register()
-
-
-def main(arguments=None):
-    '''Set up logging and register action.'''
-    if arguments is None:
-        arguments = []
-
-    parser = argparse.ArgumentParser()
-    # Allow setting of logging level from arguments.
-    loggingLevels = {}
-    for level in (
-        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
-        logging.ERROR, logging.CRITICAL
-    ):
-        loggingLevels[logging.getLevelName(level).lower()] = level
-
-    parser.add_argument(
-        '-v', '--verbosity',
-        help='Set the logging output verbosity.',
-        choices=loggingLevels.keys(),
-        default='info'
-    )
-    namespace = parser.parse_args(arguments)
-
-    # Set up basic logging
-    logging.basicConfig(level=loggingLevels[namespace.verbosity])
-
-    session = ftrack_api.Session()
-    register(session)
-
-    # Wait for events
-    logging.info(
-        'Registered actions and listening for events. Use Ctrl-C to abort.'
-    )
-    session.event_hub.wait()
-
-
-if __name__ == '__main__':
-    raise SystemExit(main(sys.argv[1:]))
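The deleted action's get_hierarchical_value shows the lookup rule hierarchical attributes follow: an entity's own value wins, otherwise walk up the parents until the project. The same rule in a dict-based sketch (the data shape is illustrative):

    def resolve(key, entity):
        # entity = {"attrs": {...}, "parent": entity_or_None}
        value = entity["attrs"].get(key)
        if value is not None or entity["parent"] is None:
            return value
        return resolve(key, entity["parent"])

    shot = {"attrs": {"fps": None},
            "parent": {"attrs": {"fps": 25}, "parent": None}}
    assert resolve("fps", shot) == 25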
pype/ftrack/actions/action_sync_to_avalon.py (new file, 193 lines)

@@ -0,0 +1,193 @@
+import os
+import time
+import traceback
+
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.avalon_sync import SyncEntitiesFactory
+
+
+class SyncToAvalonLocal(BaseAction):
+    """
+    Synchronizing data action - from Ftrack to Avalon DB
+
+    Stores all information about entity.
+    - Name(string) - Most important information = identifier of entity
+    - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
+    - Data(dictionary):
+        - VisualParent(ObjectId) - Avalon Id of parent asset
+        - Parents(array of string) - All parent names except project
+        - Tasks(array of string) - Tasks on asset
+        - FtrackId(string)
+        - entityType(string) - entity's type on Ftrack
+        * All Custom attributes in group 'Avalon'
+            - custom attributes that start with 'avalon_' are skipped
+
+    * These information are stored for entities in whole project.
+
+    Avalon ID of asset is stored to Ftrack
+        - Custom attribute 'avalon_mongo_id'.
+    - action IS NOT creating this Custom attribute if doesn't exist
+        - run 'Create Custom Attributes' action
+        - or do it manually (Not recommended)
+    """
+
+    #: Action identifier.
+    identifier = "sync.to.avalon.local"
+    #: Action label.
+    label = "Pype Admin"
+    #: Action variant
+    variant = "- Sync To Avalon (Local)"
+    #: Action description.
+    description = "Send data from Ftrack to Avalon"
+    #: priority
+    priority = 200
+    #: roles that are allowed to register this action
+    role_list = ["Pypeclub"]
+    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
+        os.environ.get('PYPE_STATICS_SERVER', '')
+    )
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.entities_factory = SyncEntitiesFactory(self.log, self.session)
+
+    def discover(self, session, entities, event):
+        ''' Validation '''
+        for ent in event["data"]["selection"]:
+            # Ignore entities that are not tasks or projects
+            if ent["entityType"].lower() in ["show", "task"]:
+                return True
+        return False
+
+    def launch(self, session, in_entities, event):
+        time_start = time.time()
+
+        self.show_message(event, "Synchronization - Preparing data", True)
+        # Get ftrack project
+        if in_entities[0].entity_type.lower() == "project":
+            ft_project_name = in_entities[0]["full_name"]
+        else:
+            ft_project_name = in_entities[0]["project"]["full_name"]
+
+        try:
+            self.entities_factory.launch_setup(ft_project_name)
+            time_1 = time.time()
+
+            self.entities_factory.set_cutom_attributes()
+            time_2 = time.time()
+
+            # This must happen before all filtering!!!
+            self.entities_factory.prepare_avalon_entities(ft_project_name)
+            time_3 = time.time()
+
+            self.entities_factory.filter_by_ignore_sync()
+            time_4 = time.time()
+
+            self.entities_factory.duplicity_regex_check()
+            time_5 = time.time()
+
+            self.entities_factory.prepare_ftrack_ent_data()
+            time_6 = time.time()
+
+            self.entities_factory.synchronize()
+            time_7 = time.time()
+
+            self.log.debug(
+                "*** Synchronization finished ***"
+            )
+            self.log.debug(
+                "preparation <{}>".format(time_1 - time_start)
+            )
+            self.log.debug(
+                "set_cutom_attributes <{}>".format(time_2 - time_1)
+            )
+            self.log.debug(
+                "prepare_avalon_entities <{}>".format(time_3 - time_2)
+            )
+            self.log.debug(
+                "filter_by_ignore_sync <{}>".format(time_4 - time_3)
+            )
+            self.log.debug(
+                "duplicity_regex_check <{}>".format(time_5 - time_4)
+            )
+            self.log.debug(
+                "prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
+            )
+            self.log.debug(
+                "synchronize <{}>".format(time_7 - time_6)
+            )
+            self.log.debug(
+                "* Total time: {}".format(time_7 - time_start)
+            )
+
+            report = self.entities_factory.report()
+            if report and report.get("items"):
+                default_title = "Synchronization report ({}):".format(
+                    ft_project_name
+                )
+                self.show_interface(
+                    items=report["items"],
+                    title=report.get("title", default_title),
+                    event=event
+                )
+            return {
+                "success": True,
+                "message": "Synchronization Finished"
+            }
+
+        except Exception:
+            self.log.error(
+                "Synchronization failed due to code error", exc_info=True
+            )
+            msg = "An error occurred during synchronization"
+            title = "Synchronization report ({}):".format(ft_project_name)
+            items = []
+            items.append({
+                "type": "label",
+                "value": "# {}".format(msg)
+            })
+            items.append({
+                "type": "label",
+                "value": "## Traceback of the error"
+            })
+            items.append({
+                "type": "label",
+                "value": "<p>{}</p>".format(
+                    str(traceback.format_exc()).replace(
+                        "\n", "<br>").replace(
+                        " ", "&nbsp;"
+                    )
+                )
+            })
+
+            report = {"items": []}
+            try:
+                report = self.entities_factory.report()
+            except Exception:
+                pass
+
+            _items = report.get("items", [])
+            if _items:
+                items.append(self.entities_factory.report_splitter)
+                items.extend(_items)
+
+            self.show_interface(items, title, event)
+
+            return {"success": True, "message": msg}
+
        finally:
+            try:
+                self.entities_factory.dbcon.uninstall()
+            except Exception:
+                pass
+
+            try:
+                self.entities_factory.session.close()
+            except Exception:
+                pass
+
+
+def register(session, plugins_presets={}):
+    '''Register plugin. Called when used as an plugin.'''
+
+    SyncToAvalonLocal(session, plugins_presets).register()
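The launch method above clocks every pipeline stage with paired time.time() calls. A more compact way to express the same instrumentation (a sketch; the stage names match the diff, the timed_stages helper itself is hypothetical):

    import time

    def timed_stages(log, stages):
        # Run (name, callable) pairs, logging each stage's duration.
        total_start = time.time()
        for name, func in stages:
            start = time.time()
            func()
            log.debug("{} <{}>".format(name, time.time() - start))
        log.debug("* Total time: {}".format(time.time() - total_start))

Called with something like stages = [("set_cutom_attributes", factory.set_cutom_attributes), ("synchronize", factory.synchronize)], it collapses the seven repeated debug blocks into one loop.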
@@ -1,266 +0,0 @@
-import os
-import sys
-import time
-import argparse
-import logging
-import json
-import collections
-
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction
-from pype.ftrack.lib import avalon_sync as ftracklib
-from pype.vendor.ftrack_api import session as fa_session
-
-
-class SyncToAvalon(BaseAction):
-    '''
-    Synchronizing data action - from Ftrack to Avalon DB
-
-    Stores all information about entity.
-    - Name(string) - Most important information = identifier of entity
-    - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
-    - Silo(string) - Last parent except project
-    - Data(dictionary):
-        - VisualParent(ObjectId) - Avalon Id of parent asset
-        - Parents(array of string) - All parent names except project
-        - Tasks(array of string) - Tasks on asset
-        - FtrackId(string)
-        - entityType(string) - entity's type on Ftrack
-        * All Custom attributes in group 'Avalon' which name don't start with 'avalon_'
-
-    * These information are stored also for all parents and children entities.
-
-    Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
-    - action IS NOT creating this Custom attribute if doesn't exist
-        - run 'Create Custom Attributes' action or do it manually (Not recommended)
-
-    If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID:
-    - name, parents and silo are checked -> shows error if are not exact the same
-        - after sync it is not allowed to change names or move entities
-
-    If ID in 'avalon_mongo_id' is empty string or is not found in DB:
-    - tries to find entity by name
-        - found:
-            - raise error if ftrackId/visual parent/parents are not same
-        - not found:
-            - Creates asset/project
-
-    '''
-
-    #: Action identifier.
-    identifier = 'sync.to.avalon.local'
-    #: Action label.
-    label = "Pype Admin"
-    variant = '- Sync To Avalon (Local)'
-    #: Action description.
-    description = 'Send data from Ftrack to Avalon'
-    #: Action icon.
-    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
-        os.environ.get('PYPE_STATICS_SERVER', '')
-    )
-    #: roles that are allowed to register this action
-    role_list = ['Pypeclub']
-    #: Action priority
-    priority = 200
-
-    project_query = (
-        "select full_name, name, custom_attributes"
-        ", project_schema._task_type_schema.types.name"
-        " from Project where full_name is \"{}\""
-    )
-
-    entities_query = (
-        "select id, name, parent_id, link, custom_attributes"
-        " from TypedContext where project.full_name is \"{}\""
-    )
-
-    # Entity type names(lowered) that won't be synchronized with their children
-    ignore_entity_types = ["task", "milestone"]
-
-    def __init__(self, session, plugins_presets):
-        super(SyncToAvalon, self).__init__(session)
-        # reload utils on initialize (in case of server restart)
-
-    def discover(self, session, entities, event):
-        ''' Validation '''
-        for entity in entities:
-            if entity.entity_type.lower() not in ['task', 'assetversion']:
-                return True
-
-        return False
-
-    def launch(self, session, entities, event):
-        time_start = time.time()
-        message = ""
-
-        # JOB SETTINGS
-        userId = event['source']['user']['id']
-        user = session.query('User where id is ' + userId).one()
-
-        job = session.create('Job', {
-            'user': user,
-            'status': 'running',
-            'data': json.dumps({
-                'description': 'Sync Ftrack to Avalon.'
-            })
-        })
-        session.commit()
-        try:
-            self.log.debug("Preparing entities for synchronization")
-
-            if entities[0].entity_type.lower() == "project":
-                ft_project_name = entities[0]["full_name"]
-            else:
-                ft_project_name = entities[0]["project"]["full_name"]
-
-            project_entities = session.query(
-                self.entities_query.format(ft_project_name)
-            ).all()
-
-            ft_project = session.query(
-                self.project_query.format(ft_project_name)
-            ).one()
-
-            entities_by_id = {}
-            entities_by_parent = collections.defaultdict(list)
-
-            entities_by_id[ft_project["id"]] = ft_project
-            for ent in project_entities:
-                entities_by_id[ent["id"]] = ent
-                entities_by_parent[ent["parent_id"]].append(ent)
-
-            importable = []
-            for ent_info in event["data"]["selection"]:
-                ent = entities_by_id[ent_info["entityId"]]
-                for link_ent_info in ent["link"]:
-                    link_ent = entities_by_id[link_ent_info["id"]]
-                    if (
-                        ent.entity_type.lower() in self.ignore_entity_types or
-                        link_ent in importable
-                    ):
-                        continue
-
-                    importable.append(link_ent)
-
-            def add_children(parent_id):
-                ents = entities_by_parent[parent_id]
-                for ent in ents:
-                    if ent.entity_type.lower() in self.ignore_entity_types:
-                        continue
-
-                    if ent not in importable:
-                        importable.append(ent)
-
-                    add_children(ent["id"])
-
-            # add children of selection to importable
-            for ent_info in event["data"]["selection"]:
-                add_children(ent_info["entityId"])
-
-            # Check names: REGEX in schema/duplicates - raise error if found
-            all_names = []
-            duplicates = []
-
-            for entity in importable:
-                ftracklib.avalon_check_name(entity)
-                if entity.entity_type.lower() == "project":
-                    continue
-
-                if entity['name'] in all_names:
-                    duplicates.append("'{}'".format(entity['name']))
-                else:
-                    all_names.append(entity['name'])
-
-            if len(duplicates) > 0:
-                # TODO Show information to user and return False
-                raise ValueError(
-                    "Entity name duplication: {}".format(", ".join(duplicates))
-                )
-
-            # ----- PROJECT ------
-            avalon_project = ftracklib.get_avalon_project(ft_project)
-            custom_attributes = ftracklib.get_avalon_attr(session)
-
-            # Import all entities to Avalon DB
-            for entity in importable:
-                result = ftracklib.import_to_avalon(
-                    session=session,
-                    entity=entity,
-                    ft_project=ft_project,
-                    av_project=avalon_project,
-                    custom_attributes=custom_attributes
-                )
-                # TODO better error handling
-                # maybe split into critical, warnings and messages?
-                if 'errors' in result and len(result['errors']) > 0:
-                    job['status'] = 'failed'
-                    session.commit()
-
-                    ftracklib.show_errors(self, event, result['errors'])
-
-                    return {
-                        'success': False,
-                        'message': "Sync to avalon FAILED"
-                    }
-
-                if avalon_project is None:
-                    if 'project' in result:
-                        avalon_project = result['project']
-
-            job['status'] = 'done'
-
-        except ValueError as ve:
-            # TODO remove this part!!!!
-            job['status'] = 'failed'
-            message = str(ve)
-            self.log.error(
-                'Error during syncToAvalon: {}'.format(message),
-                exc_info=True
-            )
-
-        except Exception as e:
-            job['status'] = 'failed'
-            exc_type, exc_obj, exc_tb = sys.exc_info()
-            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
-            log_message = "{}/{}/Line: {}".format(
-                exc_type, fname, exc_tb.tb_lineno
-            )
-            self.log.error(
-                'Error during syncToAvalon: {}'.format(log_message),
-                exc_info=True
-            )
-            # TODO add traceback to message and show to user
-            message = (
-                'Unexpected Error'
-                ' - Please check Log for more information'
-            )
-        finally:
-            if job['status'] in ['queued', 'running']:
-                job['status'] = 'failed'
-            session.commit()
-
-            time_end = time.time()
-            self.log.debug("Synchronization took \"{}\"".format(
-                str(time_end - time_start)
-            ))
-
-            if job["status"] != "failed":
-                self.log.debug("Triggering Sync hierarchical attributes")
-                self.trigger_action("sync.hierarchical.attrs.local", event)
-
-        if len(message) > 0:
-            message = "Unable to sync: {}".format(message)
-            return {
-                'success': False,
-                'message': message
-            }
-
-        return {
-            'success': True,
-            'message': "Synchronization was successfull"
-        }
-
-
-def register(session, plugins_presets={}):
-    '''Register plugin. Called when used as an plugin.'''
-    SyncToAvalon(session, plugins_presets).register()
@@ -6,7 +6,7 @@ import collections
 import json
 import re
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from avalon import io, inventory, schema
 
@@ -4,7 +4,7 @@ import argparse
 import logging
 import json
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 
 
@@ -43,7 +43,7 @@ class ThumbToChildren(BaseAction):
                 'description': 'Push thumbnails to Childrens'
-                })
+            })
 
         session.commit()
         try:
             for entity in entities:
                 thumbid = entity['thumbnail_id']
@@ -53,10 +53,11 @@ class ThumbToChildren(BaseAction):
 
             # inform the user that the job is done
            job['status'] = 'done'
-        except Exception:
+        except Exception as exc:
+            session.rollback()
             # fail the job if something goes wrong
             job['status'] = 'failed'
-            raise
+            raise exc
         finally:
             session.commit()
 
@@ -3,7 +3,7 @@ import sys
 import argparse
 import logging
 import json
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 
 
@@ -40,9 +40,9 @@ class ThumbToParent(BaseAction):
             'status': 'running',
             'data': json.dumps({
                 'description': 'Push thumbnails to parents'
-                })
-            })
+            })
+        })
         session.commit()
         try:
             for entity in entities:
                 parent = None
@@ -74,10 +74,11 @@ class ThumbToParent(BaseAction):
             # inform the user that the job is done
             job['status'] = status or 'done'
 
-        except Exception as e:
+        except Exception as exc:
+            session.rollback()
             # fail the job if something goes wrong
             job['status'] = 'failed'
-            raise e
+            raise exc
 
         finally:
             session.commit()
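Both thumbnail actions land on the same shape: roll back in the except so the session's operation queue is clean again, re-raise, and let finally commit the job's failed status. A condensed sketch of the pattern (job and session names match the diff; the work callable is a placeholder):

    def run_job(session, job, work):
        try:
            work()
            job['status'] = 'done'
        except Exception as exc:
            # Discard the half-finished operations that poisoned the
            # session; otherwise the commit below would replay them.
            session.rollback()
            job['status'] = 'failed'
            raise exc
        finally:
            # Runs on both paths, persisting 'done' or 'failed'.
            session.commit()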
@@ -6,7 +6,7 @@ import collections
 import json
 import re
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from avalon import io, inventory, schema
 from pype.ftrack.lib.io_nonsingleton import DbConnector
@@ -1,7 +1,5 @@
 import os
-from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction
-from pype.vendor.ftrack_api import session as fa_session
 
 
 class ActionAskWhereIRun(BaseAction):
@@ -1,7 +1,7 @@
 import platform
 import socket
 import getpass
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 
 
@@ -1,383 +0,0 @@
-import os
-import sys
-import json
-import argparse
-import logging
-import collections
-
-from pypeapp import config
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction, lib
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-from bson.objectid import ObjectId
-
-
-class SyncHierarchicalAttrs(BaseAction):
-
-    db_con = DbConnector()
-    ca_mongoid = lib.get_ca_mongoid()
-
-    #: Action identifier.
-    identifier = 'sync.hierarchical.attrs'
-    #: Action label.
-    label = "Pype Admin"
-    variant = '- Sync Hier Attrs (Server)'
-    #: Action description.
-    description = 'Synchronize hierarchical attributes'
-    #: Icon
-    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
-        os.environ.get(
-            'PYPE_STATICS_SERVER',
-            'http://localhost:{}'.format(
-                config.get_presets().get('services', {}).get(
-                    'statics_server', {}
-                ).get('default_port', 8021)
-            )
-        )
-    )
-
-    def register(self):
-        self.session.event_hub.subscribe(
-            'topic=ftrack.action.discover',
-            self._discover
-        )
-
-        self.session.event_hub.subscribe(
-            'topic=ftrack.action.launch and data.actionIdentifier={}'.format(
-                self.identifier
-            ),
-            self._launch
-        )
-
-    def discover(self, session, entities, event):
-        ''' Validation '''
-        role_check = False
-        discover = False
-        role_list = ['Pypeclub', 'Administrator', 'Project Manager']
-        user = session.query(
-            'User where id is "{}"'.format(event['source']['user']['id'])
-        ).one()
-
-        for role in user['user_security_roles']:
-            if role['security_role']['name'] in role_list:
-                role_check = True
-                break
-
-        if role_check is True:
-            for entity in entities:
-                context_type = entity.get('context_type', '').lower()
-                if (
-                    context_type in ('show', 'task') and
-                    entity.entity_type.lower() != 'task'
-                ):
-                    discover = True
-                    break
-
-        return discover
-
-    def launch(self, session, entities, event):
-        self.interface_messages = {}
-
-        user = session.query(
-            'User where id is "{}"'.format(event['source']['user']['id'])
-        ).one()
-
-        job = session.create('Job', {
-            'user': user,
-            'status': 'running',
-            'data': json.dumps({
-                'description': 'Sync Hierachical attributes'
-            })
-        })
-        session.commit()
-        self.log.debug('Job with id "{}" created'.format(job['id']))
-
-        process_session = ftrack_api.Session(
-            server_url=session.server_url,
-            api_key=session.api_key,
-            api_user=session.api_user,
-            auto_connect_event_hub=True
-        )
-        try:
-            # Collect hierarchical attrs
-            self.log.debug('Collecting Hierarchical custom attributes started')
-            custom_attributes = {}
-            all_avalon_attr = process_session.query(
-                'CustomAttributeGroup where name is "avalon"'
-            ).one()
-
-            error_key = (
-                'Hierarchical attributes with set "default" value (not allowed)'
-            )
-
-            for cust_attr in all_avalon_attr['custom_attribute_configurations']:
-                if 'avalon_' in cust_attr['key']:
-                    continue
-
-                if not cust_attr['is_hierarchical']:
-                    continue
-
-                if cust_attr['default']:
-                    if error_key not in self.interface_messages:
-                        self.interface_messages[error_key] = []
-                    self.interface_messages[error_key].append(
-                        cust_attr['label']
-                    )
-
-                    self.log.warning((
-                        'Custom attribute "{}" has set default value.'
-                        ' This attribute can\'t be synchronized'
-                    ).format(cust_attr['label']))
-                    continue
-
-                custom_attributes[cust_attr['key']] = cust_attr
-
-            self.log.debug(
-                'Collecting Hierarchical custom attributes has finished'
-            )
-
-            if not custom_attributes:
-                msg = 'No hierarchical attributes to sync.'
-                self.log.debug(msg)
-                return {
-                    'success': True,
-                    'message': msg
-                }
-
-            entity = entities[0]
-            if entity.entity_type.lower() == 'project':
-                project_name = entity['full_name']
-            else:
-                project_name = entity['project']['full_name']
-
-            self.db_con.install()
-            self.db_con.Session['AVALON_PROJECT'] = project_name
-
-            _entities = self._get_entities(event, process_session)
-
-            for entity in _entities:
-                self.log.debug(30*'-')
-                self.log.debug(
-                    'Processing entity "{}"'.format(entity.get('name', entity))
-                )
-
-                ent_name = entity.get('name', entity)
-                if entity.entity_type.lower() == 'project':
-                    ent_name = entity['full_name']
-
-                for key in custom_attributes:
-                    self.log.debug(30*'*')
-                    self.log.debug(
-                        'Processing Custom attribute key "{}"'.format(key)
-                    )
-                    # check if entity has that attribute
-                    if key not in entity['custom_attributes']:
-                        error_key = 'Missing key on entities'
-                        if error_key not in self.interface_messages:
-                            self.interface_messages[error_key] = []
-
-                        self.interface_messages[error_key].append(
-                            '- key: "{}" - entity: "{}"'.format(key, ent_name)
-                        )
-
-                        self.log.error((
-                            '- key "{}" not found on "{}"'
-                        ).format(key, entity.get('name', entity)))
-                        continue
-
-                    value = self.get_hierarchical_value(key, entity)
-                    if value is None:
-                        error_key = (
-                            'Missing value for key on entity'
-                            ' and its parents (synchronization was skipped)'
-                        )
-                        if error_key not in self.interface_messages:
-                            self.interface_messages[error_key] = []
-
-                        self.interface_messages[error_key].append(
-                            '- key: "{}" - entity: "{}"'.format(key, ent_name)
-                        )
-
-                        self.log.warning((
-                            '- key "{}" not set on "{}" or its parents'
-                        ).format(key, ent_name))
-                        continue
-
-                    self.update_hierarchical_attribute(entity, key, value)
-
-            job['status'] = 'done'
-            session.commit()
-
-        except Exception:
-            self.log.error(
-                'Action "{}" failed'.format(self.label),
-                exc_info=True
-            )
-
-        finally:
-            self.db_con.uninstall()
-
-            if job['status'] in ('queued', 'running'):
-                job['status'] = 'failed'
-                session.commit()
-
-            if self.interface_messages:
-                self.show_interface_from_dict(
-                    messages=self.interface_messages,
-                    title="something went wrong",
-                    event=event
-                )
-
-        return True
-
-    def get_hierarchical_value(self, key, entity):
-        value = entity['custom_attributes'][key]
-        if (
-            value is not None or
-            entity.entity_type.lower() == 'project'
-        ):
-            return value
-
-        return self.get_hierarchical_value(key, entity['parent'])
-
-    def update_hierarchical_attribute(self, entity, key, value):
-        if (
-            entity['context_type'].lower() not in ('show', 'task') or
-            entity.entity_type.lower() == 'task'
-        ):
-            return
-
-        ent_name = entity.get('name', entity)
-        if entity.entity_type.lower() == 'project':
-            ent_name = entity['full_name']
-
-        hierarchy = '/'.join(
-            [a['name'] for a in entity.get('ancestors', [])]
-        )
-        if hierarchy:
-            hierarchy = '/'.join(
-                [entity['project']['full_name'], hierarchy, entity['name']]
-            )
-        elif entity.entity_type.lower() == 'project':
hierarchy = entity['full_name']
|
||||
else:
|
||||
hierarchy = '/'.join(
|
||||
[entity['project']['full_name'], entity['name']]
|
||||
)
|
||||
|
||||
self.log.debug('- updating entity "{}"'.format(hierarchy))
|
||||
|
||||
# collect entity's custom attributes
|
||||
custom_attributes = entity.get('custom_attributes')
|
||||
if not custom_attributes:
|
||||
return
|
||||
|
||||
mongoid = custom_attributes.get(self.ca_mongoid)
|
||||
if not mongoid:
|
||||
error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'-- entity "{}" is not synchronized to avalon. Skipping'.format(
|
||||
ent_name
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
mongoid = ObjectId(mongoid)
|
||||
except Exception:
|
||||
error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'-- entity "{}" has stored invalid MongoID. Skipping'.format(
|
||||
ent_name
|
||||
)
|
||||
)
|
||||
return
|
||||
# Find entity in Mongo DB
|
||||
mongo_entity = self.db_con.find_one({'_id': mongoid})
|
||||
if not mongo_entity:
|
||||
error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
|
||||
if error_key not in self.interface_messages:
|
||||
self.interface_messages[error_key] = []
|
||||
|
||||
if ent_name not in self.interface_messages[error_key]:
|
||||
self.interface_messages[error_key].append(ent_name)
|
||||
|
||||
self.log.warning(
|
||||
'-- entity "{}" was not found in DB by id "{}". Skipping'.format(
|
||||
ent_name, str(mongoid)
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
# Change value if entity has set it's own
|
||||
entity_value = custom_attributes[key]
|
||||
if entity_value is not None:
|
||||
value = entity_value
|
||||
|
||||
data = mongo_entity.get('data') or {}
|
||||
|
||||
data[key] = value
|
||||
self.db_con.update_many(
|
||||
{'_id': mongoid},
|
||||
{'$set': {'data': data}}
|
||||
)
|
||||
|
||||
for child in entity.get('children', []):
|
||||
self.update_hierarchical_attribute(child, key, value)
|
||||
|
||||
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
SyncHierarchicalAttrs(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
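The recursion in get_hierarchical_value above is the heart of this action: an entity's own value wins, otherwise the walk continues through parents up to the project. A minimal standalone sketch of the same idea, using plain dicts instead of real ftrack entity objects (entity layout here is illustrative, not the ftrack API):

    # Minimal sketch of the parent-walk used by get_hierarchical_value.
    # Entities are plain dicts here; the action works with ftrack entities.
    def resolve_hierarchical(key, entity):
        value = entity["custom_attributes"].get(key)
        if value is not None or entity["parent"] is None:
            return value
        return resolve_hierarchical(key, entity["parent"])

    project = {"custom_attributes": {"fps": 25}, "parent": None}
    seq = {"custom_attributes": {"fps": None}, "parent": project}
    shot = {"custom_attributes": {"fps": None}, "parent": seq}

    assert resolve_hierarchical("fps", shot) == 25  # inherited from project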
@@ -1,338 +1,227 @@
import os
import sys
import argparse
import logging
import json
import collections
import time
import traceback

from pype.ftrack import BaseAction
from pype.ftrack.lib.avalon_sync import SyncEntitiesFactory
from pypeapp import config
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
from pype.vendor.ftrack_api import session as fa_session


class SyncToAvalon(BaseAction):
    '''
class SyncToAvalonServer(BaseAction):
    """
    Synchronizing data action - from Ftrack to Avalon DB

    Stores all information about entity.
    - Name(string) - Most important information = identifier of entity
    - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
    - Silo(string) - Last parent except project
    - Data(dictionary):
        - VisualParent(ObjectId) - Avalon Id of parent asset
        - Parents(array of string) - All parent names except project
        - Tasks(array of string) - Tasks on asset
        - FtrackId(string)
        - entityType(string) - entity's type on Ftrack
    * All Custom attributes in group 'Avalon' whose name doesn't start with 'avalon_'
    * All Custom attributes in group 'Avalon'
        - custom attributes that start with 'avalon_' are skipped

    * This information is also stored for all parent and child entities.
    * This information is stored for all entities in the whole project.

    Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
    Avalon ID of asset is stored to Ftrack
    - Custom attribute 'avalon_mongo_id'.
    - action DOES NOT create this Custom attribute if it doesn't exist
        - run 'Create Custom Attributes' action or do it manually (Not recommended)

    If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID:
    - name, parents and silo are checked -> shows error if they are not exactly the same
    - after sync it is not allowed to change names or move entities

    If ID in 'avalon_mongo_id' is empty string or is not found in DB:
    - tries to find entity by name
        - found:
            - raise error if ftrackId/visual parent/parents are not the same
        - not found:
            - Creates asset/project

    '''

        - run 'Create Custom Attributes' action
        - or do it manually (Not recommended)
    """
    #: Action identifier.
    identifier = 'sync.to.avalon'
    identifier = "sync.to.avalon.server"
    #: Action label.
    label = "Pype Admin"
    variant = "- Sync To Avalon (Server)"
    #: Action description.
    description = 'Send data from Ftrack to Avalon'
    description = "Send data from Ftrack to Avalon"
    #: Action icon.
    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
    icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
        os.environ.get(
            'PYPE_STATICS_SERVER',
            'http://localhost:{}'.format(
                config.get_presets().get('services', {}).get(
                    'statics_server', {}
                ).get('default_port', 8021)
            "PYPE_STATICS_SERVER",
            "http://localhost:{}".format(
                config.get_presets().get("services", {}).get(
                    "rest_api", {}
                ).get("default_port", 8021)
            )
        )
    )

    project_query = (
        "select full_name, name, custom_attributes"
        ", project_schema._task_type_schema.types.name"
        " from Project where full_name is \"{}\""
    )

    entities_query = (
        "select id, name, parent_id, link, custom_attributes"
        " from TypedContext where project.full_name is \"{}\""
    )

    # Entity type names (lowered) that won't be synchronized with their children
    ignore_entity_types = ["task", "milestone"]
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.entities_factory = SyncEntitiesFactory(self.log, self.session)

    def register(self):
        self.session.event_hub.subscribe(
            'topic=ftrack.action.discover',
            self._discover
            "topic=ftrack.action.discover",
            self._discover,
            priority=self.priority
        )

        self.session.event_hub.subscribe(
            'topic=ftrack.action.launch and data.actionIdentifier={0}'.format(
                self.identifier
            ),
            self._launch
        )
        launch_subscription = (
            "topic=ftrack.action.launch and data.actionIdentifier={0}"
        ).format(self.identifier)
        self.session.event_hub.subscribe(launch_subscription, self._launch)

    def discover(self, session, entities, event):
        ''' Validation '''
        roleCheck = False
        discover = False
        roleList = ['Pypeclub', 'Administrator', 'Project Manager']
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        for role in user['user_security_roles']:
            if role['security_role']['name'] in roleList:
                roleCheck = True
        """ Validation """
        # Check if selection is valid
        valid_selection = False
        for ent in event["data"]["selection"]:
            # Ignore entities that are not tasks or projects
            if ent["entityType"].lower() in ["show", "task"]:
                valid_selection = True
                break
        if roleCheck is True:
            for entity in entities:
                if entity.entity_type.lower() not in ['task', 'assetversion']:
                    discover = True
                    break

        return discover
        if not valid_selection:
            return False

    def launch(self, session, entities, event):
        # Get user and check his roles
        user_id = event.get("source", {}).get("user", {}).get("id")
        if not user_id:
            return False

        user = session.query("User where id is \"{}\"".format(user_id)).first()
        if not user:
            return False

        role_list = ["Pypeclub", "Administrator", "Project Manager"]
        for role in user["user_security_roles"]:
            if role["security_role"]["name"] in role_list:
                return True
        return False

    def launch(self, session, in_entities, event):
        time_start = time.time()
        message = ""

        # JOB SETTINGS
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()
        self.show_message(event, "Synchronization - Preparing data", True)
        # Get ftrack project
        if in_entities[0].entity_type.lower() == "project":
            ft_project_name = in_entities[0]["full_name"]
        else:
            ft_project_name = in_entities[0]["project"]["full_name"]

        job = session.create('Job', {
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Sync Ftrack to Avalon.'
            })
        })
        session.commit()
        try:
            self.log.debug("Preparing entities for synchronization")
            self.entities_factory.launch_setup(ft_project_name)
            time_1 = time.time()

            if entities[0].entity_type.lower() == "project":
                ft_project_name = entities[0]["full_name"]
            else:
                ft_project_name = entities[0]["project"]["full_name"]
            self.entities_factory.set_cutom_attributes()
            time_2 = time.time()

            project_entities = session.query(
                self.entities_query.format(ft_project_name)
            ).all()
            # This must happen before all filtering!!!
            self.entities_factory.prepare_avalon_entities(ft_project_name)
            time_3 = time.time()

            ft_project = session.query(
                self.project_query.format(ft_project_name)
            ).one()
            self.entities_factory.filter_by_ignore_sync()
            time_4 = time.time()

            entities_by_id = {}
            entities_by_parent = collections.defaultdict(list)
            self.entities_factory.duplicity_regex_check()
            time_5 = time.time()

            entities_by_id[ft_project["id"]] = ft_project
            for ent in project_entities:
                entities_by_id[ent["id"]] = ent
                entities_by_parent[ent["parent_id"]].append(ent)
            self.entities_factory.prepare_ftrack_ent_data()
            time_6 = time.time()

            importable = []
            for ent_info in event["data"]["selection"]:
                ent = entities_by_id[ent_info["entityId"]]
                for link_ent_info in ent["link"]:
                    link_ent = entities_by_id[link_ent_info["id"]]
                    if (
                        ent.entity_type.lower() in self.ignore_entity_types or
                        link_ent in importable
                    ):
                        continue
            self.entities_factory.synchronize()
            time_7 = time.time()

                    importable.append(link_ent)
            self.log.debug(
                "*** Synchronization finished ***"
            )
            self.log.debug(
                "preparation <{}>".format(time_1 - time_start)
            )
            self.log.debug(
                "set_cutom_attributes <{}>".format(time_2 - time_1)
            )
            self.log.debug(
                "prepare_avalon_entities <{}>".format(time_3 - time_2)
            )
            self.log.debug(
                "filter_by_ignore_sync <{}>".format(time_4 - time_3)
            )
            self.log.debug(
                "duplicity_regex_check <{}>".format(time_5 - time_4)
            )
            self.log.debug(
                "prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
            )
            self.log.debug(
                "synchronize <{}>".format(time_7 - time_6)
            )
            self.log.debug(
                "* Total time: {}".format(time_7 - time_start)
            )

            def add_children(parent_id):
                ents = entities_by_parent[parent_id]
                for ent in ents:
                    if ent.entity_type.lower() in self.ignore_entity_types:
                        continue

                    if ent not in importable:
                        importable.append(ent)

                    add_children(ent["id"])

            # add children of selection to importable
            for ent_info in event["data"]["selection"]:
                add_children(ent_info["entityId"])

            # Check names: REGEX in schema/duplicates - raise error if found
            all_names = []
            duplicates = []

            for entity in importable:
                lib.avalon_check_name(entity)
                if entity.entity_type.lower() == "project":
                    continue

                if entity['name'] in all_names:
                    duplicates.append("'{}'".format(entity['name']))
                else:
                    all_names.append(entity['name'])

            if len(duplicates) > 0:
                # TODO Show information to user and return False
                raise ValueError(
                    "Entity name duplication: {}".format(", ".join(duplicates))
            report = self.entities_factory.report()
            if report and report.get("items"):
                default_title = "Synchronization report ({}):".format(
                    ft_project_name
                )

            # ----- PROJECT ------
            avalon_project = lib.get_avalon_project(ft_project)
            custom_attributes = lib.get_avalon_attr(session)

            # Import all entities to Avalon DB
            for entity in importable:
                result = lib.import_to_avalon(
                    session=session,
                    entity=entity,
                    ft_project=ft_project,
                    av_project=avalon_project,
                    custom_attributes=custom_attributes
                self.show_interface(
                    items=report["items"],
                    title=report.get("title", default_title),
                    event=event
                )
                # TODO better error handling
                # maybe split into critical, warnings and messages?
                if 'errors' in result and len(result['errors']) > 0:
                    job['status'] = 'failed'
                    session.commit()

                    lib.show_errors(self, event, result['errors'])

                    return {
                        'success': False,
                        'message': "Sync to avalon FAILED"
                    }

                if avalon_project is None:
                    if 'project' in result:
                        avalon_project = result['project']

            job['status'] = 'done'
            session.commit()

        except ValueError as ve:
            # TODO remove this part!!!!
            job['status'] = 'failed'
            session.commit()
            message = str(ve)
            self.log.error(
                'Error during syncToAvalon: {}'.format(message),
                exc_info=True
            )

        except Exception as e:
            job['status'] = 'failed'
            session.commit()
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            log_message = "{}/{}/Line: {}".format(
                exc_type, fname, exc_tb.tb_lineno
            )
            self.log.error(
                'Error during syncToAvalon: {}'.format(log_message),
                exc_info=True
            )
            # TODO add traceback to message and show to user
            message = (
                'Unexpected Error'
                ' - Please check Log for more information'
            )

        finally:
            if job['status'] in ['queued', 'running']:
                job['status'] = 'failed'

            session.commit()

            time_end = time.time()
            self.log.debug("Synchronization took \"{}\"".format(
                str(time_end - time_start)
            ))

            if job["status"] != "failed":
                self.log.debug("Triggering Sync hierarchical attributes")
                self.trigger_action("sync.hierarchical.attrs", event)

        if len(message) > 0:
            message = "Unable to sync: {}".format(message)
            return {
                'success': False,
                'message': message
                "success": True,
                "message": "Synchronization Finished"
            }

        return {
            'success': True,
            'message': "Synchronization was successful"
        }
        except Exception:
            self.log.error(
                "Synchronization failed due to code error", exc_info=True
            )
            msg = "An error has happened during synchronization"
            title = "Synchronization report ({}):".format(ft_project_name)
            items = []
            items.append({
                "type": "label",
                "value": "# {}".format(msg)
            })
            items.append({
                "type": "label",
                "value": "## Traceback of the error"
            })
            items.append({
                "type": "label",
                "value": "<p>{}</p>".format(
                    str(traceback.format_exc()).replace(
                        "\n", "<br>").replace(
                        " ", "&nbsp;"
                    )
                )
            })

            report = {"items": []}
            try:
                report = self.entities_factory.report()
            except Exception:
                pass

            _items = report.get("items", [])
            if _items:
                items.append(self.entities_factory.report_splitter)
                items.extend(_items)

            self.show_interface(items, title, event)

            return {"success": True, "message": msg}

        finally:
            try:
                self.entities_factory.dbcon.uninstall()
            except Exception:
                pass

            try:
                self.entities_factory.session.close()
            except Exception:
                pass


def register(session, plugins_presets):
def register(session, plugins_presets={}):
    '''Register plugin. Called when used as a plugin.'''

    # Validate that session is an instance of ftrack_api.Session. If not,
    # assume that register is being called from an old or incompatible API and
    # return without doing anything.
    SyncToAvalon(session, plugins_presets).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()
    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
    SyncToAvalonServer(session, plugins_presets).register()
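For orientation, the docstring above maps onto a MongoDB document of roughly the following shape. All values are illustrative only; the authoritative mapping lives in SyncEntitiesFactory:

    # Hypothetical shape of one synchronized asset document in the Avalon DB.
    # Field values are made up; see avalon_sync / SyncEntitiesFactory for the
    # real construction.
    asset_doc = {
        "_id": "5d1f...",                # ObjectId, mirrored to 'avalon_mongo_id'
        "type": "asset",
        "name": "sh010",
        "parent": "5d1e...",             # Avalon project id
        "data": {
            "visualParent": "5d1d...",   # parent asset id
            "parents": ["seq01"],        # parent names except project
            "tasks": ["animation", "comp"],
            "ftrackId": "ffff-1111",
            "entityType": "Shot",
            "fps": 25,                   # custom attributes from 'avalon' group
        },
    }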
@@ -1,6 +1,6 @@
from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent, get_ca_mongoid
from pype.ftrack.events.event_sync_to_avalon import Sync_to_Avalon
from pype.ftrack.lib import BaseEvent
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
from pype.ftrack.events.event_sync_to_avalon import SyncToAvalonEvent


class DelAvalonIdFromNew(BaseEvent):

@@ -11,7 +11,8 @@ class DelAvalonIdFromNew(BaseEvent):

    Priority of this event must be less than SyncToAvalon event
    '''
    priority = Sync_to_Avalon.priority - 1
    priority = SyncToAvalonEvent.priority - 1
    ignore_me = True

    def launch(self, session, event):
        created = []

@@ -28,7 +29,7 @@ class DelAvalonIdFromNew(BaseEvent):

                elif (
                    entity.get('action', None) == 'update' and
                    get_ca_mongoid() in entity['keys'] and
                    CustAttrIdKey in entity['keys'] and
                    entity_id in created
                ):
                    ftrack_entity = session.get(

@@ -37,13 +38,11 @@ class DelAvalonIdFromNew(BaseEvent):
                    )

                    cust_attr = ftrack_entity['custom_attributes'][
                        get_ca_mongoid()
                        CustAttrIdKey
                    ]

                    if cust_attr != '':
                        ftrack_entity['custom_attributes'][
                            get_ca_mongoid()
                        ] = ''
                        ftrack_entity['custom_attributes'][CustAttrIdKey] = ''
                        session.commit()

        except Exception:

@@ -53,5 +52,4 @@ class DelAvalonIdFromNew(BaseEvent):

def register(session, plugins_presets):
    '''Register plugin. Called when used as a plugin.'''

    DelAvalonIdFromNew(session, plugins_presets).register()
@@ -1,4 +1,4 @@
from pype.vendor import ftrack_api
import ftrack_api
from pype.ftrack import BaseEvent
import operator


@@ -80,10 +80,10 @@ class NextTaskUpdate(BaseEvent):
                    '>>> [ {} ] updated to [ Ready ]'
                ).format(path))
            except Exception as e:
                session.rollback()
                self.log.warning((
                    '!!! [ {} ] status couldn\'t be set: [ {} ]'
                ).format(path, e))
                session.rollback()
                ).format(path, str(e)), exc_info=True)


def register(session, plugins_presets):
@@ -1,8 +1,8 @@
from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent
import ftrack_api
from pype.ftrack.lib import BaseEvent


class Radio_buttons(BaseEvent):
class RadioButtons(BaseEvent):

    ignore_me = True


@@ -37,4 +37,4 @@ class Radio_buttons(BaseEvent):
def register(session, plugins_presets):
    '''Register plugin. Called when used as a plugin.'''

    Radio_buttons(session, plugins_presets).register()
    RadioButtons(session, plugins_presets).register()
@@ -1,213 +0,0 @@
import os
import sys

from pype.ftrack.lib.io_nonsingleton import DbConnector

from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent, lib
from bson.objectid import ObjectId


class SyncHierarchicalAttrs(BaseEvent):
    # After sync to avalon event!
    priority = 101
    db_con = DbConnector()
    ca_mongoid = lib.get_ca_mongoid()

    def launch(self, session, event):
        # Filter entities and changed values if it makes sense to run script
        processable = []
        processable_ent = {}
        for ent in event['data']['entities']:
            # Ignore entities that are not tasks or projects
            if ent['entityType'].lower() not in ['task', 'show']:
                continue

            action = ent.get("action")
            # skip if remove (Entity does not exist in Ftrack)
            if action == "remove":
                continue

            # When entity was added we don't care about keys
            if action != "add":
                keys = ent.get('keys')
                if not keys:
                    continue

            entity = session.get(self._get_entity_type(ent), ent['entityId'])
            processable.append(ent)

            processable_ent[ent['entityId']] = {
                "entity": entity,
                "action": action,
                "link": entity["link"]
            }

        if not processable:
            return True

        # Find project of entities
        ft_project = None
        for entity_dict in processable_ent.values():
            try:
                base_proj = entity_dict['link'][0]
            except Exception:
                continue
            ft_project = session.get(base_proj['type'], base_proj['id'])
            break

        # check if project is set to auto-sync
        if (
            ft_project is None or
            'avalon_auto_sync' not in ft_project['custom_attributes'] or
            ft_project['custom_attributes']['avalon_auto_sync'] is False
        ):
            return True

        # Get hierarchical custom attributes from "avalon" group
        custom_attributes = {}
        query = 'CustomAttributeGroup where name is "avalon"'
        all_avalon_attr = session.query(query).one()
        for cust_attr in all_avalon_attr['custom_attribute_configurations']:
            if 'avalon_' in cust_attr['key']:
                continue
            if not cust_attr['is_hierarchical']:
                continue
            custom_attributes[cust_attr['key']] = cust_attr

        if not custom_attributes:
            return True

        self.db_con.install()
        self.db_con.Session['AVALON_PROJECT'] = ft_project['full_name']

        for ent in processable:
            entity_dict = processable_ent[ent['entityId']]

            entity = entity_dict["entity"]
            ent_path = "/".join([ent["name"] for ent in entity_dict['link']])
            action = entity_dict["action"]

            keys_to_process = {}
            if action == "add":
                # Store all custom attributes when entity was added
                for key in custom_attributes:
                    keys_to_process[key] = entity['custom_attributes'][key]
            else:
                # Update only updated keys
                for key in ent['keys']:
                    if key in custom_attributes:
                        keys_to_process[key] = entity['custom_attributes'][key]

            processed_keys = self.get_hierarchical_values(
                keys_to_process, entity
            )
            # Do the processing of values
            self.update_hierarchical_attribute(entity, processed_keys, ent_path)

        self.db_con.uninstall()

        return True

    def get_hierarchical_values(self, keys_dict, entity):
        # check already set values
        _set_keys = []
        for key, value in keys_dict.items():
            if value is not None:
                _set_keys.append(key)

        # pop set values from keys_dict
        set_keys = {}
        for key in _set_keys:
            set_keys[key] = keys_dict.pop(key)

        # find if entity has set values and pop them out
        keys_to_pop = []
        for key in keys_dict.keys():
            _val = entity["custom_attributes"][key]
            if _val:
                keys_to_pop.append(key)
                set_keys[key] = _val

        for key in keys_to_pop:
            keys_dict.pop(key)

        # if there are no keys left to resolve, return what was found
        if not keys_dict:
            return set_keys

        # end recursion if entity is project
        if entity.entity_type.lower() == "project":
            for key, value in keys_dict.items():
                set_keys[key] = value

        else:
            result = self.get_hierarchical_values(keys_dict, entity["parent"])
            for key, value in result.items():
                set_keys[key] = value

        return set_keys

    def update_hierarchical_attribute(self, entity, keys_dict, ent_path):
        # TODO store all keys at once for entity
        custom_attributes = entity.get('custom_attributes')
        if not custom_attributes:
            return

        mongoid = custom_attributes.get(self.ca_mongoid)
        if not mongoid:
            return

        try:
            mongoid = ObjectId(mongoid)
        except Exception:
            return

        mongo_entity = self.db_con.find_one({'_id': mongoid})
        if not mongo_entity:
            return

        changed_keys = {}
        data = mongo_entity.get('data') or {}
        for key, value in keys_dict.items():
            cur_value = data.get(key)
            if cur_value:
                if cur_value == value:
                    continue
            changed_keys[key] = value
            data[key] = value

        if not changed_keys:
            return

        self.log.debug(
            "{} - updated hierarchical attributes: {}".format(
                ent_path, str(changed_keys)
            )
        )

        self.db_con.update_many(
            {'_id': mongoid},
            {'$set': {'data': data}}
        )

        for child in entity.get('children', []):
            _keys_dict = {}
            for key, value in keys_dict.items():
                if key not in child.get('custom_attributes', {}):
                    continue
                child_value = child['custom_attributes'][key]
                if child_value is not None:
                    continue
                _keys_dict[key] = value

            if not _keys_dict:
                continue
            child_path = "/".join([ent["name"] for ent in child['link']])
            self.update_hierarchical_attribute(child, _keys_dict, child_path)


def register(session, plugins_presets):
    '''Register plugin. Called when used as a plugin.'''

    SyncHierarchicalAttrs(session, plugins_presets).register()
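The deleted handler above resolved several keys in one walk: get_hierarchical_values drops a key from the list of unresolved keys as soon as a value is found, so each level of the hierarchy is visited at most once. A condensed sketch of that control flow (plain dicts and hypothetical names, not the ftrack API):

    # Sketch of the one-pass, multi-key resolution from get_hierarchical_values.
    # 'entity' is a plain dict here; the real handler walks ftrack entities.
    def resolve_many(keys, entity):
        found = {}
        for key in list(keys):
            value = entity["custom_attributes"].get(key)
            if value is not None:
                found[key] = value
                keys.remove(key)  # resolved keys drop out of the walk
        if keys and entity.get("parent"):
            found.update(resolve_many(keys, entity["parent"]))
        return found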
File diff suppressed because it is too large
@@ -1,11 +1,11 @@
import os
import sys
import re
from pype.vendor import ftrack_api
import ftrack_api
from pype.ftrack import BaseEvent


class Test_Event(BaseEvent):
class TestEvent(BaseEvent):

    ignore_me = True


@@ -23,4 +23,4 @@ class Test_Event(BaseEvent):
def register(session, plugins_presets):
    '''Register plugin. Called when used as a plugin.'''

    Test_Event(session, plugins_presets).register()
    TestEvent(session, plugins_presets).register()
@@ -1,4 +1,3 @@
from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent


@@ -26,33 +25,41 @@ class ThumbnailEvents(BaseEvent):
            # Update task thumbnail from published version
            # if (entity['entityType'] == 'assetversion' and
            #         entity['action'] == 'encoded'):
            if (
                entity['entityType'] == 'assetversion'
                and 'thumbid' in (entity.get('keys') or [])
            elif (
                entity['entityType'] == 'assetversion' and
                entity['action'] != 'remove' and
                'thumbid' in (entity.get('keys') or [])
            ):

                version = session.get('AssetVersion', entity['entityId'])
                if not version:
                    continue

                thumbnail = version.get('thumbnail')
                if thumbnail:
                    parent = version['asset']['parent']
                    task = version['task']
                    parent['thumbnail_id'] = version['thumbnail_id']
                    if parent.entity_type.lower() == "project":
                        name = parent["full_name"]
                    else:
                        name = parent["name"]
                    msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)
                if not thumbnail:
                    continue

                    if task:
                        task['thumbnail_id'] = version['thumbnail_id']
                        msg += " and task [ {} ]".format(task["name"])
                parent = version['asset']['parent']
                task = version['task']
                parent['thumbnail_id'] = version['thumbnail_id']
                if parent.entity_type.lower() == "project":
                    name = parent["full_name"]
                else:
                    name = parent["name"]
                msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)

                    self.log.info(msg)
                if task:
                    task['thumbnail_id'] = version['thumbnail_id']
                    msg += " and task [ {} ]".format(task["name"])

                session.commit()
                self.log.info(msg)

                try:
                    session.commit()
                except Exception:
                    session.rollback()


def register(session, plugins_presets):
    '''Register plugin. Called when used as a plugin.'''

    ThumbnailEvents(session, plugins_presets).register()
@@ -1,12 +1,15 @@
from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent, lib
from pype.ftrack.lib.io_nonsingleton import DbConnector
from bson.objectid import ObjectId
from pypeapp import config
from pypeapp import Anatomy
import subprocess
import os
import re
import subprocess

from pype.ftrack import BaseEvent
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
from pype.ftrack.lib.io_nonsingleton import DbConnector

from bson.objectid import ObjectId

from pypeapp import config
from pypeapp import Anatomy


class UserAssigmentEvent(BaseEvent):

@@ -36,7 +39,6 @@ class UserAssigmentEvent(BaseEvent):
    """

    db_con = DbConnector()
    ca_mongoid = lib.get_ca_mongoid()

    def error(self, *err):
        for e in err:

@@ -105,7 +107,7 @@ class UserAssigmentEvent(BaseEvent):
        self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']

        avalon_entity = None
        parent_id = parent['custom_attributes'].get(self.ca_mongoid)
        parent_id = parent['custom_attributes'].get(CustAttrIdKey)
        if parent_id:
            parent_id = ObjectId(parent_id)
            avalon_entity = self.db_con.find_one({
@@ -1,4 +1,4 @@
from pype.vendor import ftrack_api
import ftrack_api
from pype.ftrack import BaseEvent


@@ -6,7 +6,6 @@ class VersionToTaskStatus(BaseEvent):

    def launch(self, session, event):
        '''Propagates status from version to task when changed'''
        session.commit()

        # start of event procedure ----------------------------------
        for entity in event['data'].get('entities', []):

@@ -62,8 +61,10 @@ class VersionToTaskStatus(BaseEvent):
                    task['status'] = task_status
                    session.commit()
                except Exception as e:
                    session.rollback()
                    self.log.warning('!!! [ {} ] status couldn\'t be set:\
                        [ {} ]'.format(path, e))
                    session.rollback()
                else:
                    self.log.info('>>> [ {} ] updated to [ {} ]'.format(
                        path, task_status['name']))
@@ -1 +1,2 @@
from .ftrack_server import FtrackServer
from .lib import check_ftrack_url
@@ -9,11 +9,12 @@ import atexit
import time
from urllib.parse import urlparse

import requests
from pype.vendor import ftrack_api
import ftrack_api
from pype.ftrack.lib import credentials
from pype.ftrack.ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import ftrack_events_mongo_settings
from pype.ftrack.ftrack_server.lib import (
    ftrack_events_mongo_settings, check_ftrack_url
)
import socket_thread


@@ -25,36 +26,6 @@ class MongoPermissionsError(Exception):
        super().__init__(message)


def check_ftrack_url(url, log_errors=True):
    """Checks if Ftrack server is responding"""
    if not url:
        print('ERROR: Ftrack URL is not set!')
        return None

    url = url.strip('/ ')

    if 'http' not in url:
        if url.endswith('ftrackapp.com'):
            url = 'https://' + url
        else:
            url = 'https://{0}.ftrackapp.com'.format(url)
    try:
        result = requests.get(url, allow_redirects=False)
    except requests.exceptions.RequestException:
        if log_errors:
            print('ERROR: Entered Ftrack URL is not accessible!')
        return False

    if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
        if log_errors:
            print('ERROR: Entered Ftrack URL is not accessible!')
        return False

    print('DEBUG: Ftrack server {} is accessible.'.format(url))

    return url


def check_mongo_url(host, port, log_error=False):
    """Checks if mongo server is responding"""
    sock = None
@@ -2,7 +2,7 @@ import os
import sys
import types
import importlib
from pype.vendor import ftrack_api
import ftrack_api
import time
import logging
import inspect

@@ -100,7 +100,10 @@ class FtrackServer:
            log.warning(msg, exc_info=e)

        if len(register_functions_dict) < 1:
            raise Exception
            raise Exception((
                "There are no events with register function."
                " Registered paths: \"{}\""
            ).format("| ".join(paths)))

        # Load presets for setting plugins
        key = "user"
@@ -1,4 +1,5 @@
import os
import requests
try:
    from urllib.parse import urlparse, parse_qs
except ImportError:

@@ -66,3 +67,33 @@ def get_ftrack_event_mongo_info():
    url = "mongodb://{}{}{}{}".format(user_pass, socket_path, dab, auth)

    return url, database, collection


def check_ftrack_url(url, log_errors=True):
    """Checks if Ftrack server is responding"""
    if not url:
        print('ERROR: Ftrack URL is not set!')
        return None

    url = url.strip('/ ')

    if 'http' not in url:
        if url.endswith('ftrackapp.com'):
            url = 'https://' + url
        else:
            url = 'https://{0}.ftrackapp.com'.format(url)
    try:
        result = requests.get(url, allow_redirects=False)
    except requests.exceptions.RequestException:
        if log_errors:
            print('ERROR: Entered Ftrack URL is not accessible!')
        return False

    if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
        if log_errors:
            print('ERROR: Entered Ftrack URL is not accessible!')
        return False

    print('DEBUG: Ftrack server {} is accessible.'.format(url))

    return url
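A quick illustration of the normalization check_ftrack_url performs before probing the server. The return values shown assume the server actually answers with the FTRACK_VERSION header:

    # Normalization behaviour of check_ftrack_url (assuming the server responds):
    check_ftrack_url("mystudio")                   # -> "https://mystudio.ftrackapp.com"
    check_ftrack_url("mystudio.ftrackapp.com")     # -> "https://mystudio.ftrackapp.com"
    check_ftrack_url("https://mystudio.ftrackapp.com/")  # trailing "/" stripped
    check_ftrack_url("")                           # -> None (URL not set)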
@@ -41,7 +41,7 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub):
    def prepare_dbcon(self):
        try:
            self.dbcon.install()
            self.dbcon._database.collection_names()
            self.dbcon._database.list_collection_names()
        except pymongo.errors.AutoReconnect:
            log.error("Mongo server \"{}\" is not responding, exiting.".format(
                os.environ["AVALON_MONGO"]
@@ -21,11 +21,23 @@ class StorerEventHub(ftrack_api.event.hub.EventHub):

    def _handle_packet(self, code, packet_identifier, path, data):
        """Override `_handle_packet` which extends heartbeat"""
        if self._code_name_mapping[code] == "heartbeat":
        code_name = self._code_name_mapping[code]
        if code_name == "heartbeat":
            # Reply with heartbeat.
            self.sock.sendall(b"storer")
            return self._send_packet(self._code_name_mapping['heartbeat'])

        elif code_name == "connect":
            event = ftrack_api.event.base.Event(
                topic="pype.storer.started",
                data={},
                source={
                    "id": self.id,
                    "user": {"username": self._api_user}
                }
            )
            self._event_queue.put(event)

        return super(StorerEventHub, self)._handle_packet(
            code, packet_identifier, path, data
        )
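The synthetic "pype.storer.started" topic gives other code a hook: anything subscribed to it runs once per hub connection, which is how the auto-sync trigger further below gets re-queued after reconnects. A minimal subscriber sketch (standalone session, illustrative callback):

    # Minimal subscriber for the synthetic topic published on hub connect.
    import ftrack_api

    session = ftrack_api.Session(auto_connect_event_hub=True)

    def on_storer_started(event):
        # fires once per hub connection, see StorerEventHub above
        print("storer connected as", event["source"]["user"]["username"])

    session.event_hub.subscribe("topic=pype.storer.started", on_storer_started)
    session.event_hub.wait()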
@@ -30,8 +30,7 @@ def main(args):
        server.run_server(session)

    except Exception as exc:
        import traceback
        traceback.print_tb(exc.__traceback__)
        log.error("Event server crashed. See traceback below", exc_info=True)

    finally:
        log.debug("First closing socket")
@@ -5,6 +5,7 @@ import signal
import socket
import pymongo

import ftrack_api
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
from pype.ftrack.lib.custom_db_connector import DbConnector

@@ -15,6 +16,13 @@ log = Logger().get_logger("Event storer")

url, database, table_name = get_ftrack_event_mongo_info()


class SessionClass:
    def __init__(self):
        self.session = None


session_obj = SessionClass()
dbcon = DbConnector(
    mongo_url=url,
    database_name=database,

@@ -24,10 +32,11 @@ dbcon = DbConnector(
# ignore_topics = ["ftrack.meta.connected"]
ignore_topics = []


def install_db():
    try:
        dbcon.install()
        dbcon._database.collection_names()
        dbcon._database.list_collection_names()
    except pymongo.errors.AutoReconnect:
        log.error("Mongo server \"{}\" is not responding, exiting.".format(
            os.environ["AVALON_MONGO"]

@@ -49,7 +58,7 @@ def launch(event):

    try:
        # dbcon.insert_one(event_data)
        dbcon.update({"id": event_id}, event_data, upsert=True)
        dbcon.replace_one({"id": event_id}, event_data, upsert=True)
        log.debug("Event: {} stored".format(event_id))

    except pymongo.errors.AutoReconnect:

@@ -65,10 +74,71 @@ def launch(event):
    )


def trigger_sync(event):
    session = session_obj.session
    if session is None:
        log.warning("Session is not set. Can't trigger Sync to avalon action.")
        return True

    projects = session.query("Project").all()
    if not projects:
        return True

    query = {
        "pype_data.is_processed": False,
        "topic": "ftrack.action.launch",
        "data.actionIdentifier": "sync.to.avalon.server"
    }
    set_dict = {
        "$set": {"pype_data.is_processed": True}
    }
    dbcon.update_many(query, set_dict)

    selections = []
    for project in projects:
        if project["status"] != "active":
            continue

        auto_sync = project["custom_attributes"].get("avalon_auto_sync")
        if not auto_sync:
            continue

        selections.append({
            "entityId": project["id"],
            "entityType": "show"
        })

    if not selections:
        return

    user = session.query(
        "User where username is \"{}\"".format(session.api_user)
    ).one()
    user_data = {
        "username": user["username"],
        "id": user["id"]
    }

    for selection in selections:
        event_data = {
            "actionIdentifier": "sync.to.avalon.server",
            "selection": [selection]
        }
        session.event_hub.publish(
            ftrack_api.event.base.Event(
                topic="ftrack.action.launch",
                data=event_data,
                source=dict(user=user_data)
            ),
            on_error="ignore"
        )


def register(session):
    '''Registers the event, subscribing the discover and launch topics.'''
    install_db()
    session.event_hub.subscribe("topic=*", launch)
    session.event_hub.subscribe("topic=pype.storer.started", trigger_sync)


def main(args):

@@ -85,6 +155,7 @@ def main(args):

    try:
        session = StorerSession(auto_connect_event_hub=True, sock=sock)
        session_obj.session = session
        register(session)
        server = FtrackServer("event")
        log.debug("Launched Ftrack Event storer")
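Two details of the storer are worth calling out: events are stored idempotently with replace_one(..., upsert=True) keyed on the ftrack event id, and the auto-sync trigger publishes a regular ftrack.action.launch event so the server action handles it like any user-initiated launch. A hedged sketch of the storage step with bare pymongo (connection string and collection name are illustrative):

    # Idempotent event storage: the same ftrack event id never duplicates.
    import pymongo

    client = pymongo.MongoClient("mongodb://localhost:27017")
    events = client["pype"]["ftrack_events"]  # illustrative names

    event_data = {"id": "evt-123", "topic": "ftrack.update", "data": {}}
    events.replace_one({"id": event_data["id"]}, event_data, upsert=True)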
@@ -6,8 +6,8 @@ import signal
import threading

from ftrack_server import FtrackServer
from pype.vendor import ftrack_api
from pype.vendor.ftrack_api.event.hub import EventHub
import ftrack_api
from ftrack_api.event.hub import EventHub
from pypeapp import Logger

log = Logger().get_logger("Event Server Legacy")
@@ -1,4 +1,4 @@
from .avalon_sync import *
from . import avalon_sync
from .credentials import *
from .ftrack_app_handler import *
from .ftrack_event_handler import *
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
import os
import json
from pype.vendor import ftrack_api
import ftrack_api
import appdirs
@@ -22,7 +22,12 @@ import pymongo
from pymongo.client_session import ClientSession

class NotActiveTable(Exception):
    pass
    def __init__(self, *args, **kwargs):
        msg = "Active table is not set. (This is a bug)"
        if not (args or kwargs):
            args = (msg,)
        super().__init__(*args, **kwargs)


def auto_reconnect(func):
    """Handle auto-reconnect with up to 3 retries"""

@@ -37,7 +42,16 @@ def auto_reconnect(func):
                time.sleep(0.1)
            else:
                raise
    return decorated


def check_active_table(func):
    """Check if DbConnector has active table before db method is called"""
    @functools.wraps(func)
    def decorated(obj, *args, **kwargs):
        if not obj.active_table:
            raise NotActiveTable()
        return func(obj, *args, **kwargs)
    return decorated


@@ -53,7 +67,6 @@ def check_active_table(func):


class DbConnector:

    log = logging.getLogger(__name__)
    timeout = 1000

@@ -68,10 +81,18 @@ class DbConnector:

        self.active_table = table_name

    def __getitem__(self, key):
        # gives direct access to a collection without setting `active_table`
        return self._database[key]

    def __getattribute__(self, attr):
        # not all methods of the PyMongo database are implemented here;
        # with this fallback it is possible to use them too
        try:
            return super().__getattribute__(attr)
            return super(DbConnector, self).__getattribute__(attr)
        except AttributeError:
            if self.active_table is None:
                raise NotActiveTable()
            return self._database[self.active_table].__getattribute__(attr)

    def install(self):

@@ -131,6 +152,15 @@ class DbConnector:
    def exist_table(self, table_name):
        return table_name in self.tables()

    def create_table(self, name, **options):
        if self.exist_table(name):
            return

        return self._database.create_collection(name, **options)

    def exist_table(self, table_name):
        return table_name in self.tables()

    def tables(self):
        """List available tables
        Returns:

@@ -166,18 +196,21 @@ class DbConnector:
    @check_active_table
    @auto_reconnect
    def find(self, filter, projection=None, sort=None, **options):
        options["projection"] = projection
        options["sort"] = sort
        return self._database[self.active_table].find(filter, **options)
        return self._database[self.active_table].find(
            filter, projection, **options
        )

    @check_active_table
    @auto_reconnect
    def find_one(self, filter, projection=None, sort=None, **options):
        assert isinstance(filter, dict), "filter must be <dict>"

        options["projection"] = projection
        options["sort"] = sort
        return self._database[self.active_table].find_one(filter, **options)
        return self._database[self.active_table].find_one(
            filter,
            projection,
            **options
        )

    @check_active_table
    @auto_reconnect

@@ -202,8 +235,8 @@ class DbConnector:

    @check_active_table
    @auto_reconnect
    def distinct(self, *args, **kwargs):
        return self._database[self.active_table].distinct(*args, **kwargs)
    def distinct(self, **options):
        return self._database[self.active_table].distinct(**options)

    @check_active_table
    @auto_reconnect

@@ -216,10 +249,14 @@ class DbConnector:
    @auto_reconnect
    def delete_one(self, filter, collation=None, **options):
        options["collation"] = collation
        return self._database[self.active_table].delete_one(filter, **options)
        return self._database[self.active_table].delete_one(
            filter, **options
        )

    @check_active_table
    @auto_reconnect
    def delete_many(self, filter, collation=None, **options):
        options["collation"] = collation
        return self._database[self.active_table].delete_many(filter, **options)
        return self._database[self.active_table].delete_many(
            filter, **options
        )
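The two decorators above compose: check_active_table guards the precondition, auto_reconnect retries the pymongo call. A self-contained sketch of the same pattern, with a simplified retry loop (names mirror the code above, but this is a reduction, not the original implementation):

    # Simplified version of the decorator pair used on DbConnector methods.
    import functools
    import time

    import pymongo


    def auto_reconnect(func):
        """Retry a pymongo call up to 3 times on AutoReconnect."""
        @functools.wraps(func)
        def decorated(*args, **kwargs):
            for attempt in range(3):
                try:
                    return func(*args, **kwargs)
                except pymongo.errors.AutoReconnect:
                    if attempt == 2:
                        raise
                    time.sleep(0.1)
        return decorated


    def check_active_table(func):
        """Fail fast when no active table is set on the connector."""
        @functools.wraps(func)
        def decorated(obj, *args, **kwargs):
            if not obj.active_table:
                raise RuntimeError("Active table is not set")
            return func(obj, *args, **kwargs)
        return decorated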
@@ -345,25 +345,44 @@ class AppAction(BaseHandler):
            statuses = presets['status_update']

            actual_status = entity['status']['name'].lower()
            next_status_name = None
            for key, value in statuses.items():
                if actual_status in value or '_any_' in value:
                    if key != '_ignore_':
                        next_status_name = key
            already_tested = []
            ent_path = "/".join(
                [ent["name"] for ent in entity['link']]
            )
            while True:
                next_status_name = None
                for key, value in statuses.items():
                    if key in already_tested:
                        continue
                    if actual_status in value or '_any_' in value:
                        if key != '_ignore_':
                            next_status_name = key
                            already_tested.append(key)
                        break
                    already_tested.append(key)

                if next_status_name is None:
                    break

            if next_status_name is not None:
                try:
                    query = 'Status where name is "{}"'.format(
                        next_status_name
                    )
                    status = session.query(query).one()

                    entity['status'] = status
                    session.commit()
                    self.log.debug("Changing status to \"{}\" <{}>".format(
                        next_status_name, ent_path
                    ))
                    break

                except Exception:
                    session.rollback()
                    msg = (
                        'Status "{}" in presets wasn\'t found on Ftrack'
                    ).format(next_status_name)
                        'Status "{}" in presets wasn\'t found'
                        ' on Ftrack entity type "{}"'
                    ).format(next_status_name, entity.entity_type)
                    self.log.warning(msg)

        # Set origin avalon environments
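The reworked loop above keeps resolving through the presets mapping until a status actually exists for the entity, using already_tested to avoid retrying the same key. A presets dict of the shape the loop expects might look like this (keys and status names are illustrative; real values come from studio presets):

    # Illustrative 'status_update' presets: new status -> statuses it applies to.
    # '_any_' matches every current status; '_ignore_' suppresses the update.
    status_update = {
        "In Progress": ["not started", "ready"],
        "Ready":       ["_any_"],
        "_ignore_":    ["omitted", "on hold"],
    }
    # With actual status "Not Started": the first pass picks "In Progress";
    # if that status does not exist on the entity's schema, the loop retries
    # and falls back to "Ready" via '_any_'.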
@@ -1,8 +1,7 @@
import functools
import time
from pypeapp import Logger
from pype.vendor import ftrack_api
from pype.vendor.ftrack_api import session as fa_session
import ftrack_api
from pype.ftrack.ftrack_server import session_processor

@@ -13,6 +12,13 @@ class MissingPermision(Exception):
super().__init__(message)


class PreregisterException(Exception):
def __init__(self, message=None):
if not message:
message = "Pre-registration conditions were not met"
super().__init__(message)


class BaseHandler(object):
'''Custom Action base class

@@ -89,15 +95,17 @@ class BaseHandler(object):
'!{} "{}" - You\'re missing required {} permissions'
).format(self.type, label, str(MPE)))
except AssertionError as ae:
self.log.info((
self.log.warning((
'!{} "{}" - {}'
).format(self.type, label, str(ae)))
except NotImplementedError:
self.log.error((
'{} "{}" - Register method is not implemented'
).format(
self.type, label)
)
).format(self.type, label))
except PreregisterException as exc:
self.log.warning((
'{} "{}" - {}'
).format(self.type, label, str(exc)))
except Exception as e:
self.log.error('{} "{}" - Registration failed ({})'.format(
self.type, label, str(e))

@@ -119,6 +127,7 @@ class BaseHandler(object):
try:
return func(*args, **kwargs)
except Exception as exc:
self.session.rollback()
msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc))
self.log.error(msg, exc_info=True)
return {

@@ -163,10 +172,10 @@ class BaseHandler(object):

if result is True:
return
msg = "Pre-register conditions were not met"
msg = None
if isinstance(result, str):
msg = result
raise Exception(msg)
raise PreregisterException(msg)

def preregister(self):
'''

@@ -233,7 +242,7 @@ class BaseHandler(object):
_entities is None or
_entities[0].get(
'link', None
) == fa_session.ftrack_api.symbol.NOT_SET
) == ftrack_api.symbol.NOT_SET
):
_entities = self._get_entities(event)

@@ -437,7 +446,7 @@ class BaseHandler(object):
'applicationId=ftrack.client.web and user.id="{0}"'
).format(user_id)
self.session.event_hub.publish(
fa_session.ftrack_api.event.base.Event(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=dict(
type='message',

@@ -485,8 +494,8 @@ class BaseHandler(object):

if not user:
raise TypeError((
'Ftrack user with {} "{}" was not found!'.format(key, value)
))
'Ftrack user with {} "{}" was not found!'
).format(key, value))

user_id = user['id']

@@ -495,7 +504,7 @@ class BaseHandler(object):
).format(user_id)

self.session.event_hub.publish(
fa_session.ftrack_api.event.base.Event(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=dict(
type='widget',

@@ -523,7 +532,7 @@ class BaseHandler(object):
else:
first = False

subtitle = {'type': 'label', 'value':'<h3>{}</h3>'.format(key)}
subtitle = {'type': 'label', 'value': '<h3>{}</h3>'.format(key)}
items.append(subtitle)
if isinstance(value, list):
for item in value:

@@ -583,7 +592,7 @@ class BaseHandler(object):

# Create and trigger event
session.event_hub.publish(
fa_session.ftrack_api.event.base.Event(
ftrack_api.event.base.Event(
topic=topic,
data=_event_data,
source=dict(user=_user_data)

@@ -593,3 +602,24 @@ class BaseHandler(object):
self.log.debug(
"Action \"{}\" Triggered successfully".format(action_name)
)

def trigger_event(
self, topic, event_data={}, session=None, source=None,
event=None, on_error="ignore"
):
if session is None:
session = self.session

if not source and event:
source = event.get("source")
# Create and trigger event
event = ftrack_api.event.base.Event(
topic=topic,
data=event_data,
source=source
)
session.event_hub.publish(event, on_error=on_error)

self.log.debug((
"Publishing event: {}"
).format(str(event.__dict__)))

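A side note on the new `trigger_event` signature above: `event_data={}` is a
mutable default argument, which Python evaluates only once at definition time,
so the same dict object is shared across all calls. A minimal self-contained
illustration of the pitfall and the conventional `None` default (this is an
aside, not part of the commit):

def append_bad(item, bucket=[]):
    # the same list object is reused by every call
    bucket.append(item)
    return bucket

def append_good(item, bucket=None):
    # a fresh list is created per call
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(append_bad(1), append_bad(2))    # [1, 2] [1, 2]
print(append_good(1), append_good(2))  # [1] [2]
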
@@ -26,6 +26,7 @@ class BaseEvent(BaseHandler):
try:
func(*args, **kwargs)
except Exception as exc:
self.session.rollback()
self.log.error(
'Event "{}" Failed: {}'.format(
self.__class__.__name__, str(exc)

@@ -50,6 +50,19 @@ class DbConnector(object):
self._database = None
self._is_installed = False

def __getitem__(self, key):
# gives direct access to a collection without setting `active_table`
return self._database[key]

def __getattribute__(self, attr):
# not all methods of the PyMongo database are implemented; with this
# it is possible to use them too
try:
return super(DbConnector, self).__getattribute__(attr)
except AttributeError:
cur_proj = self.Session["AVALON_PROJECT"]
return self._database[cur_proj].__getattribute__(attr)

def install(self):
"""Establish a persistent connection to the database"""
if self._is_installed:

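The `__getattribute__` override above gives the connector a transparent
fallback: anything not defined on `DbConnector` itself is resolved on the
active project's collection. A minimal self-contained sketch of the same
delegation pattern (`Wrapper` is illustrative, not part of pype):

class Wrapper(object):
    """Delegate unknown attribute lookups to a wrapped object."""

    def __init__(self, wrapped):
        self._wrapped = wrapped

    def __getattribute__(self, attr):
        try:
            # attributes defined on the wrapper itself win
            return super(Wrapper, self).__getattribute__(attr)
        except AttributeError:
            # fall back to the wrapped object (a pymongo collection
            # in DbConnector's case)
            wrapped = super(Wrapper, self).__getattribute__("_wrapped")
            return getattr(wrapped, attr)

numbers = Wrapper([3, 1, 2])
numbers.sort()           # resolved on the wrapped list
print(numbers._wrapped)  # [1, 2, 3]
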
@@ -4,9 +4,9 @@ import threading
import time
from Qt import QtCore, QtGui, QtWidgets

from pype.vendor import ftrack_api
import ftrack_api
from pypeapp import style
from pype.ftrack import FtrackServer, credentials
from pype.ftrack import FtrackServer, check_ftrack_url, credentials
from . import login_dialog

from pype import api as pype

@@ -24,7 +24,8 @@ class FtrackModule:
self.thread_timer = None

self.bool_logged = False
self.bool_action_server = False
self.bool_action_server_running = False
self.bool_action_thread_running = False
self.bool_timer_event = False

def show_login_widget(self):

@@ -74,28 +75,50 @@ class FtrackModule:

# Actions part
def start_action_server(self):
self.bool_action_thread_running = True
self.set_menu_visibility()
if (
self.thread_action_server is not None and
self.bool_action_thread_running is False
):
self.stop_action_server()

if self.thread_action_server is None:
self.thread_action_server = threading.Thread(
target=self.set_action_server
)
self.thread_action_server.daemon = True
self.thread_action_server.start()

log.info("Ftrack action server launched")
self.bool_action_server = True
self.set_menu_visibility()

def set_action_server(self):
try:
self.action_server.run_server()
except Exception as exc:
log.error(
"Ftrack Action server crashed! Please try to start again.",
exc_info=True
first_check = True
while self.bool_action_thread_running is True:
if not check_ftrack_url(os.environ['FTRACK_SERVER']):
if first_check:
log.warning(
"Could not connect to Ftrack server"
)
first_check = False
time.sleep(1)
continue
log.info(
"Connected to Ftrack server. Running actions session"
)
# TODO show message to user
self.bool_action_server = False
try:
self.bool_action_server_running = True
self.set_menu_visibility()
self.action_server.run_server()
if self.bool_action_thread_running:
log.debug("Ftrack action server has stopped")
except Exception:
log.warning(
"Ftrack Action server crashed. Trying to connect again",
exc_info=True
)
self.bool_action_server_running = False
self.set_menu_visibility()
first_check = True

self.bool_action_thread_running = False

def reset_action_server(self):
self.stop_action_server()

@@ -103,16 +126,21 @@ class FtrackModule:

def stop_action_server(self):
try:
self.bool_action_thread_running = False
self.action_server.stop_session()
if self.thread_action_server is not None:
self.thread_action_server.join()
self.thread_action_server = None

log.info("Ftrack action server stopped")
self.bool_action_server = False
log.info("Ftrack action server was forced to stop")

self.bool_action_server_running = False
self.set_menu_visibility()
except Exception as e:
log.error("During Killing action server: {0}".format(e))
except Exception:
log.warning(
"Error has happened during Killing action server",
exc_info=True
)

# Definition of Tray menu
def tray_menu(self, parent_menu):

@@ -158,6 +186,9 @@ class FtrackModule:
def tray_start(self):
self.validate()

def tray_exit(self):
self.stop_action_server()

# Definition of visibility of each menu actions
def set_menu_visibility(self):

@@ -170,9 +201,9 @@ class FtrackModule:
self.stop_timer_thread()
return

self.aRunActionS.setVisible(not self.bool_action_server)
self.aResetActionS.setVisible(self.bool_action_server)
self.aStopActionS.setVisible(self.bool_action_server)
self.aRunActionS.setVisible(not self.bool_action_thread_running)
self.aResetActionS.setVisible(self.bool_action_thread_running)
self.aStopActionS.setVisible(self.bool_action_thread_running)

if self.bool_timer_event is False:
self.start_timer_thread()

pype/lib.py (44 changes)
@@ -7,8 +7,6 @@ import contextlib
import subprocess
import inspect

from .vendor import pather
from .vendor.pather.error import ParseError

import avalon.io as io
import avalon.api

@@ -562,7 +560,7 @@ def get_subsets(asset_name,
find_dict = {"type": "representation",
"parent": version_sel["_id"]}

filter_repr = {"$or": [{"name": repr} for repr in representations]}
filter_repr = {"name": {"$in": representations}}

find_dict.update(filter_repr)
repres_out = [i for i in io.find(find_dict)]

@@ -572,3 +570,43 @@ def get_subsets(asset_name,
"representaions": repres_out}

return output_dict


class CustomNone:
"""Created object can be used as custom None (not equal to None).

WARNING: Multiple created objects are not equal either.
Example:
>>> a = CustomNone()
>>> a == None
False
>>> b = CustomNone()
>>> a == b
False
>>> a == a
True
"""

def __init__(self):
"""Create uuid as identifier for custom None."""
import uuid
self.identifier = str(uuid.uuid4())

def __bool__(self):
"""Return False (like default None)."""
return False

def __eq__(self, other):
"""Equality is compared by identifier value."""
if type(other) == type(self):
if other.identifier == self.identifier:
return True
return False

def __str__(self):
"""Return value of identifier when converted to string."""
return self.identifier

def __repr__(self):
"""Representation of custom None."""
return "<CustomNone-{}>".format(str(self.identifier))
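`CustomNone` is the classic sentinel-default pattern: it lets a function
distinguish "argument not passed" from "caller explicitly passed None". A
hedged usage sketch (the `update_fps` function is illustrative only):

from pype.lib import CustomNone

_NOT_SET = CustomNone()

def update_fps(fps=_NOT_SET):
    # None is a meaningful value here (clear the override), so a
    # sentinel is needed to detect "no argument given" reliably
    if fps is _NOT_SET:
        print("keeping current fps")
    elif fps is None:
        print("clearing fps override")
    else:
        print("setting fps to {}".format(fps))

update_fps()      # keeping current fps
update_fps(None)  # clearing fps override
update_fps(25)    # setting fps to 25
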
@@ -3,6 +3,7 @@
import re
import os
import uuid
import math

import bson
import json

@@ -1776,9 +1777,10 @@ def set_scene_fps(fps, update=True):
# pull from mapping
# this should convert a float string to float and int to int,
# so 25.0 is converted to 25, but 23.98 will still be a float
decimals = int(str(fps-int(fps))[2:])
if decimals == 0:
fps = int(fps)
dec, ipart = math.modf(fps)
if dec == 0.0:
fps = int(ipart)

unit = fps_mapping.get(str(fps), None)
if unit is None:
raise ValueError("Unsupported FPS value: `%s`" % fps)

@@ -1861,6 +1863,7 @@ def set_context_settings():

# Set project fps
fps = asset_data.get("fps", project_data.get("fps", 25))
api.Session["AVALON_FPS"] = fps
set_scene_fps(fps)

# Set project resolution

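The switch from string slicing to `math.modf` above is the robust way to test
for a whole-number fps: `modf` splits a float into its fractional and integral
parts without any string round-tripping. A quick illustration:

import math

for fps in (25.0, 23.976, 24):
    dec, ipart = math.modf(fps)
    if dec == 0.0:
        fps = int(ipart)
    print(fps, type(fps).__name__)
# prints: 25 int, 23.976 float, 24 int
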
@@ -43,8 +43,10 @@ class MusterModule:
self.aShowLogin.trigger()

if "RestApiServer" in modules:
def api_show_login():
self.aShowLogin.trigger()
modules["RestApiServer"].register_callback(
"muster/show_login", api_callback, "post"
"/show_login", api_show_login, "muster", "post"
)

# Definition of Tray menu

@@ -1,6 +1,5 @@
import os
from pypeapp import Logger
import hiero
from avalon import api as avalon
from pyblish import api as pyblish


@@ -17,7 +16,8 @@ from .menu import (
install as menu_install,
_update_menu_task_label
)
from .tags import add_tags_from_presets

from .events import register_hiero_events

__all__ = [
# Workfiles API

@@ -56,7 +56,8 @@ def install(config):
Installing Nukestudio integration for avalon

Args:
config (obj): avalon config module `pype` in our case, it is not used but required by avalon.api.install()
config (obj): avalon config module `pype` in our case, it is not
used but required by avalon.api.install()

"""

@@ -73,7 +74,8 @@ def install(config):
# Disable all families except for the ones we explicitly want to see
family_states = [
"write",
"review"
"review",
"plate"
]

avalon.data["familiesStateDefault"] = False

@@ -82,49 +84,8 @@ def install(config):
# install menu
menu_install()

# Workfiles.
launch_workfiles = os.environ.get("WORKFILES_STARTUP")

if launch_workfiles:
hiero.core.events.registerInterest(
"kAfterNewProjectCreated", launch_workfiles_app
)

# Add tags on project load.
hiero.core.events.registerInterest(
"kAfterProjectLoad", add_tags
)


def add_tags(event):
"""
Event for automatic tag creation after nukestudio start

Args:
event (obj): required but unused
"""

add_tags_from_presets()


def launch_workfiles_app(event):
"""
Event for launching workfiles after nukestudio start

Args:
event (obj): required but unused
"""
from .lib import set_workfiles

set_workfiles()

# Closing the new project.
event.sender.close()

# Deregister interest as it's a one-time launch.
hiero.core.events.unregisterInterest(
"kAfterNewProjectCreated", launch_workfiles_app
)
# register hiero events
register_hiero_events()


def uninstall():

pype/nukestudio/events.py (new file, 107 lines)
@@ -0,0 +1,107 @@
import os
import hiero.core.events
from pypeapp import Logger
from .lib import sync_avalon_data_to_workfile, launch_workfiles_app
from .tags import add_tags_from_presets

log = Logger().get_logger(__name__, "nukestudio")


def startupCompleted(event):
log.info("startup completed event...")
return


def shutDown(event):
log.info("shut down event...")
return


def beforeNewProjectCreated(event):
log.info("before new project created event...")
return


def afterNewProjectCreated(event):
log.info("after new project created event...")
# sync avalon data to project properties
sync_avalon_data_to_workfile()

# add tags from preset
add_tags_from_presets()

# Workfiles.
if int(os.environ.get("WORKFILES_STARTUP", "0")):
hiero.core.events.sendEvent("kStartWorkfiles", None)
# reset workfiles startup not to open any more in session
os.environ["WORKFILES_STARTUP"] = "0"


def beforeProjectLoad(event):
log.info("before project load event...")
return


def afterProjectLoad(event):
log.info("after project load event...")
# sync avalon data to project properties
sync_avalon_data_to_workfile()

# add tags from preset
add_tags_from_presets()


def beforeProjectClosed(event):
log.info("before project closed event...")
return


def afterProjectClosed(event):
log.info("after project closed event...")
return


def beforeProjectSaved(event):
log.info("before project saved event...")
return


def afterProjectSaved(event):
log.info("after project saved event...")
return


def register_hiero_events():
log.info(
"Registering events for: kBeforeNewProjectCreated, "
"kAfterNewProjectCreated, kBeforeProjectLoad, kAfterProjectLoad, "
"kBeforeProjectSave, kAfterProjectSave, kBeforeProjectClose, "
"kAfterProjectClose, kShutdown, kStartup"
)

# hiero.core.events.registerInterest(
#     "kBeforeNewProjectCreated", beforeNewProjectCreated)
hiero.core.events.registerInterest(
"kAfterNewProjectCreated", afterNewProjectCreated)

# hiero.core.events.registerInterest(
#     "kBeforeProjectLoad", beforeProjectLoad)
hiero.core.events.registerInterest(
"kAfterProjectLoad", afterProjectLoad)

# hiero.core.events.registerInterest(
#     "kBeforeProjectSave", beforeProjectSaved)
# hiero.core.events.registerInterest(
#     "kAfterProjectSave", afterProjectSaved)
#
# hiero.core.events.registerInterest(
#     "kBeforeProjectClose", beforeProjectClosed)
# hiero.core.events.registerInterest(
#     "kAfterProjectClose", afterProjectClosed)
#
# hiero.core.events.registerInterest("kShutdown", shutDown)
# hiero.core.events.registerInterest("kStartup", startupCompleted)

# workfiles
hiero.core.events.registerEventType("kStartWorkfiles")
hiero.core.events.registerInterest("kStartWorkfiles", launch_workfiles_app)

@@ -25,19 +25,26 @@ def set_workfiles():
''' Wrapping function for workfiles launcher '''
from avalon.tools import workfiles

# import session to get project dir
S = avalon.Session
active_project_root = os.path.normpath(
os.path.join(S['AVALON_PROJECTS'], S['AVALON_PROJECT'])
)
workdir = os.environ["AVALON_WORKDIR"]

# show workfile gui
workfiles.show(workdir)

def sync_avalon_data_to_workfile():
# import session to get project dir
S = avalon.Session
active_project_root = os.path.normpath(
os.path.join(S['AVALON_PROJECTS'], S['AVALON_PROJECT'])
)
# getting project
project = hiero.core.projects()[-1]

if "Tag Presets" in project.name():
return

log.debug("Synchronizing Pype metadata to project: {}".format(
project.name()))

# set project root with backward compatibility
try:
project.setProjectDirectory(active_project_root)

@@ -48,7 +55,7 @@ def set_workfiles():
# get project data from avalon db
project_data = pype.get_project()["data"]

log.info("project_data: {}".format(project_data))
log.debug("project_data: {}".format(project_data))

# get format and fps property from avalon db on project
width = project_data["resolutionWidth"]

@@ -68,6 +75,17 @@ def set_workfiles():
log.info("Project property has been synchronised with Avalon db")


def launch_workfiles_app(event):
"""
Event for launching workfiles after nukestudio start

Args:
event (obj): required but unused
"""
set_workfiles()



def reload_config():
"""Attempt to reload pipeline at run-time.

@@ -52,7 +52,13 @@ def add_tags_from_presets():
"""
Will create default tags from presets.
"""
project = hiero.core.projects()[-1]

if "Tag Presets" in project.name():
return

log.debug("Setting default tags on project: {}".format(project.name()))

# get all presets
presets = config.get_presets()

@@ -77,7 +83,7 @@ def add_tags_from_presets():

# Get project assets. Currently Ftrack specific to differentiate between
# asset builds and shots.
if int(os.getenv("TAG_ASSETBUILD_STARTUP", 0)) is 1:
if int(os.getenv("TAG_ASSETBUILD_STARTUP", 0)) == 1:
nks_pres_tags["[AssetBuilds]"] = {}
for asset in io.find({"type": "asset"}):
if asset["data"]["entityType"] == "AssetBuild":

@@ -150,3 +156,5 @@ def add_tags_from_presets():
# update only non hierarchy tags
# because hierarchy could be edited
update_tag(_t, _val)

log.info("Default Tags were set...")

@@ -1,10 +1,11 @@
import os

import hiero

from avalon import api
from pypeapp import Logger


log = Logger().get_logger(__name__, "nukestudio")

def file_extensions():
return [".hrox"]

@@ -12,20 +13,55 @@ def file_extensions():
def has_unsaved_changes():
# There are no methods for querying unsaved changes to a project, so
# enforcing to always save.
return True
# but we could at least check if a current open script has a path
project = hiero.core.projects()[-1]
if project.path():
return True
else:
return False


def save_file(filepath):
project = hiero.core.projects()[-1]
if project:

# close `Untitled` project
if "Untitled" not in project.name():
log.info("Saving project: `{}`".format(project.name()))
project.saveAs(filepath)
else:
elif not project:
log.info("Creating new project...")
project = hiero.core.newProject()
project.saveAs(filepath)
else:
log.info("Dropping `Untitled` project...")
return


def open_file(filepath):
hiero.core.openProject(filepath)
"""Manually fire the kBeforeProjectLoad event in order to work around
a bug in Hiero. The Foundry has logged this bug as:
Bug 40413 - Python API - kBeforeProjectLoad event type is not triggered
when calling hiero.core.openProject() (only triggered through UI).
It exists in all versions of Hiero through (at least) v1.9v1b12.

Once this bug is fixed, a version check will need to be added here in
order to prevent accidentally firing this event twice. The following
commented-out code is just an example, and will need to be updated when
the bug is fixed to catch the correct versions."""
# if (hiero.core.env['VersionMajor'] < 1 or
#     hiero.core.env['VersionMajor'] == 1 and hiero.core.env['VersionMinor'] < 10:
hiero.core.events.sendEvent("kBeforeProjectLoad", None)

project = hiero.core.projects()[-1]

# open project file
hiero.core.openProject(filepath.replace(os.path.sep, "/"))

# close previous project
project.close()

return True

@@ -1,5 +1,6 @@
import os
import sys
import six
import pyblish.api
import clique


@@ -125,6 +126,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
metadata=asset_metadata
)
)
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)

# Adding metadata
existing_asset_metadata = asset_entity["metadata"]

@@ -137,8 +144,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
"version": 0,
"asset": asset_entity,
}
if task:
assetversion_data['task'] = task

assetversion_data.update(data.get("assetversion_data", {}))

@@ -150,6 +155,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
# due to a ftrack_api bug where you can't add metadata on creation.
assetversion_metadata = assetversion_data.pop("metadata", {})

if task:
assetversion_data['task'] = task

# Create a new entity if none exists.
if not assetversion_entity:
assetversion_entity = session.create(

@@ -162,6 +170,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
metadata=assetversion_metadata
)
)
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)

# Adding metadata
existing_assetversion_metadata = assetversion_entity["metadata"]

@@ -170,7 +184,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):

# Have to commit the version and asset, because location can't
# determine the final location without.
session.commit()
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)

# Component
# Get existing entity.

@@ -209,7 +228,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
session.delete(member)
del(member)

session.commit()
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)

# Reset members in memory
if "members" in component_entity.keys():

@@ -320,4 +344,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
)
else:
# Commit changes.
session.commit()
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)
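The commit/rollback/`six.reraise` block is repeated after every
`session.commit()` in this plugin; it rolls the ftrack session back on failure
while preserving the original traceback on both Python 2 and 3. The repetition
could be folded into a small helper, sketched here as an assumption (no such
helper exists in this commit):

import sys

import six

def safe_commit(session):
    """Commit an ftrack session; roll back and re-raise on failure."""
    try:
        session.commit()
    except Exception:
        tp, value, tb = sys.exc_info()
        session.rollback()
        # re-raise with the original traceback (py2- and py3-compatible)
        six.reraise(tp, value, tb)
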
pype/plugins/ftrack/publish/integrate_ftrack_comments.py (new file, 31 lines)

@@ -0,0 +1,31 @@
import sys
import pyblish.api
import six


class IntegrateFtrackComments(pyblish.api.InstancePlugin):
"""Create comments in Ftrack."""

order = pyblish.api.IntegratorOrder
label = "Integrate Comments to Ftrack."
families = ["shot"]

def process(self, instance):
session = instance.context.data["ftrackSession"]

entity = session.query(
"Shot where name is \"{}\"".format(instance.data["item"].name())
).one()

notes = []
for comment in instance.data["comments"]:
notes.append(session.create("Note", {"content": comment}))

entity["notes"].extend(notes)

try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)

@@ -28,7 +28,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'plate': 'img',
'audio': 'audio',
'workfile': 'scene',
'animation': 'cache'
'animation': 'cache',
'image': 'img'
}

def process(self, instance):

@@ -37,6 +38,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):

if instance.data.get('version'):
version_number = int(instance.data.get('version'))
else:
raise ValueError("Instance version not set")

family = instance.data['family'].lower()

@@ -1,3 +1,6 @@
import sys

import six
import pyblish.api
from avalon import io


@@ -44,15 +47,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

input_data = context.data["hierarchyContext"]

# self.import_to_ftrack(input_data)

try:
self.import_to_ftrack(input_data)
except Exception as exc:
import sys
import traceback
self.log.info(traceback.format_exc(sys.exc_info()))
raise Exception("failed")
self.import_to_ftrack(input_data)

def import_to_ftrack(self, input_data, parent=None):
for entity_name in input_data:

@@ -74,9 +69,10 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

# try to find if entity already exists
else:
query = 'TypedContext where name is "{0}" and project.full_name is "{1}"'.format(
entity_name, self.ft_project["full_name"]
)
query = (
'TypedContext where name is "{0}" and '
'project_id is "{1}"'
).format(entity_name, self.ft_project["id"])
try:
entity = self.session.query(query).one()
except Exception:

@@ -106,7 +102,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
for instance in instances:
instance.data['ftrackEntity'] = entity

self.session.commit()
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)

# TASKS
tasks = entity_data.get('tasks', [])

@@ -129,11 +130,21 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
task_type=task,
parent=entity
)
self.session.commit()
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)

# Incoming links.
self.create_links(entity_data, entity)
self.session.commit()
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)

if 'childs' in entity_data:
self.import_to_ftrack(

@@ -143,7 +154,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# Clear existing links.
for link in entity.get("incoming_links", []):
self.session.delete(link)
self.session.commit()
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)

# Create new links.
for input in entity_data.get("inputs", []):

@@ -179,7 +195,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]

self.session.commit()
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)

return task


@@ -188,6 +209,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
'name': name,
'parent': parent
})
self.session.commit()
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)

return entity

@@ -3,7 +3,7 @@ import json
import re

import pyblish.api
from pype.vendor import clique
import clique


class CollectJSON(pyblish.api.ContextPlugin):

@@ -4,7 +4,7 @@ import datetime
import time

import pyblish.api
from pype.vendor import clique
import clique


class ExtractJSON(pyblish.api.ContextPlugin):

@@ -1,7 +1,7 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique
import clique


class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):

@@ -40,6 +40,15 @@ class CleanUp(pyblish.api.InstancePlugin):
active = True

def process(self, instance):
# Get the errored instances
failed = []
for result in instance.context.data["results"]:
if (result["error"] is not None and result["instance"] is not None
and result["instance"] not in failed):
failed.append(result["instance"])
assert instance not in failed, ("Result of '{}' instance "
"were not success".format(instance.data["name"]))

if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
return

@@ -24,4 +24,4 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
rootVersion = pype.get_version_from_path(filename)
context.data['version'] = rootVersion

self.log.info('Scene Version: %s' % context.data('version'))
self.log.info('Scene Version: %s' % context.data.get('version'))

@@ -1,7 +1,7 @@
import os

import pyblish.api
from pype.vendor import clique
import clique
import pype.api


@@ -1,7 +1,7 @@
import os

import pyblish.api
from pype.vendor import clique
import clique
import pype.api
from pypeapp import config

@@ -1,18 +1,23 @@
import os
from os.path import getsize
import logging
import speedcopy
import sys
import clique
import errno
import pyblish.api
from avalon import api, io
from avalon.vendor import filelink
# this is needed until speedcopy for linux is fixed
if sys.platform == "win32":
from speedcopy import copyfile
else:
from shutil import copyfile

log = logging.getLogger(__name__)


class IntegrateAssetNew(pyblish.api.InstancePlugin):
"""Resolve any dependency issius
"""Resolve any dependency issues

This plug-in resolves any paths which, if not updated might break
the published file.

@@ -57,7 +62,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"render",
"imagesequence",
"review",
"render",
"rendersetup",
"rig",
"plate",

@@ -65,7 +69,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"lut",
"audio",
"yetiRig",
"yeticache"
"yeticache",
"source",
"matchmove",
"image"
]
exclude_families = ["clip"]

@@ -475,7 +482,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

# copy file with speedcopy and check if the file sizes match
while True:
speedcopy.copyfile(src, dst)
copyfile(src, dst)
if str(getsize(src)) in str(getsize(dst)):
break

@@ -493,7 +500,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
filelink.create(src, dst, filelink.HARDLINK)

def get_subset(self, asset, instance):

subset = io.find_one({"type": "subset",
"parent": asset["_id"],
"name": instance.data["subset"]})

@@ -502,7 +508,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
self.log.debug("families. %s" % instance.data.get('families'))
self.log.debug("families. %s" % type(instance.data.get('families')))
self.log.debug(
"families. %s" % type(instance.data.get('families')))

_id = io.insert_one({
"schema": "pype:subset-3.0",

@@ -516,6 +523,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

subset = io.find_one({"_id": _id})

# add group if available
if instance.data.get("subsetGroup"):
subset["data"].update(
{"subsetGroup": instance.data.get("subsetGroup")}
)
io.update_many({
'type': 'subset',
'_id': io.ObjectId(subset["_id"])
}, {'$set': subset["data"]}
)

return subset

def create_version(self, subset, version_number, locations, data=None):

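One caveat in the copy loop above: the size check uses substring containment
(`str(getsize(src)) in str(getsize(dst))`), which would also pass for e.g.
12 vs 123 bytes. A direct integer comparison is the stricter form, sketched
here as a suggestion rather than what the commit does:

import os
import shutil

def copy_verified(src, dst):
    """Copy src to dst and verify the byte sizes match exactly."""
    shutil.copyfile(src, dst)
    if os.path.getsize(src) != os.path.getsize(dst):
        raise IOError("size mismatch after copy: {} -> {}".format(src, dst))
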
@@ -24,7 +24,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):

label = "Integrate Frames"
order = pyblish.api.IntegratorOrder
families = ["imagesequence", "source"]
families = ["imagesequence"]

family_targets = [".frames", ".local", ".review", "imagesequence", "render", "source"]
exclude_families = ["clip"]

pype/plugins/global/publish/validate_containers.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import pyblish.api

import pype.lib
from avalon.tools import cbsceneinventory


class ShowInventory(pyblish.api.Action):

label = "Show Inventory"
icon = "briefcase"
on = "failed"

def process(self, context, plugin):
cbsceneinventory.show()


class ValidateContainers(pyblish.api.ContextPlugin):
"""Containers must be updated to the latest version on publish."""

label = "Validate Containers"
order = pyblish.api.ValidatorOrder
hosts = ["maya", "houdini", "nuke"]
optional = True
actions = [ShowInventory]

def process(self, context):
if pype.lib.any_outdated():
raise ValueError("There are outdated containers in the scene.")

@@ -1,8 +1,9 @@
import pyblish.api
import os


class ValidateTemplates(pyblish.api.ContextPlugin):
"""Check if all templates were filed"""
"""Check if all templates were filled"""

label = "Validate Templates"
order = pyblish.api.ValidatorOrder - 0.1

@@ -18,12 +19,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin):
"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"],
"project": {"name": "D001_projectsx",
"code": "prjX"},
"ext": "exr",
"version": 3,
"task": "animation",
"asset": "sh001",
"hierarchy": "ep101/sq01/sh010"}

"ext": "exr",
"version": 3,
"task": "animation",
"asset": "sh001",
"app": "maya",
"hierarchy": "ep101/sq01/sh010"}

anatomy_filled = anatomy.format(data)
self.log.info(anatomy_filled)

@@ -31,11 +32,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin):
data = {"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"],
"project": {"name": "D001_projectsy",
"code": "prjY"},
"ext": "abc",
"version": 1,
"task": "lookdev",
"asset": "bob",
"hierarchy": "ep101/sq01/bob"}
"ext": "abc",
"version": 1,
"task": "lookdev",
"asset": "bob",
"app": "maya",
"hierarchy": "ep101/sq01/bob"}

anatomy_filled = context.data["anatomy"].format(data)
self.log.info(anatomy_filled["work"]["folder"])

@@ -18,3 +18,6 @@ class CreateLook(avalon.maya.Creator):

# Whether to automatically convert the textures to .tx upon publish.
self.data["maketx"] = True

# Enable users to force a copy.
self.data["forceCopy"] = False

@@ -38,7 +38,7 @@ class CreateRenderGlobals(avalon.maya.Creator):
self.log.warning("Deadline REST API url not found.")
else:
argument = "{}/api/pools?NamesOnly=true".format(deadline_url)
response = requests.get(argument)
response = self._requests_get(argument)
if not response.ok:
self.log.warning("No pools retrieved")
else:

@@ -135,7 +135,7 @@ class CreateRenderGlobals(avalon.maya.Creator):
'authToken': self._token
}
api_entry = '/api/pools/list'
response = requests.get(
response = self._requests_get(
self.MUSTER_REST_URL + api_entry, params=params)
if response.status_code != 200:
if response.status_code == 401:

pype/plugins/maya/load/load_matchmove.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from avalon import api
from maya import mel


class MatchmoveLoader(api.Loader):
"""
This will run matchmove script to create track in scene.

Supported script types are .py and .mel
"""

families = ["matchmove"]
representations = ["py", "mel"]
defaults = ["Camera", "Object", "Mocap"]

label = "Run matchmove script"
icon = "empire"
color = "orange"

def load(self, context, name, namespace, data):
if self.fname.lower().endswith(".py"):
exec(open(self.fname).read())

elif self.fname.lower().endswith(".mel"):
mel.eval('source "{}"'.format(self.fname))

else:
self.log.error("Unsupported script type")

return True

@@ -206,6 +206,11 @@ class ExtractLook(pype.api.Extractor):
destination = self.resource_destination(
instance, source, do_maketx
)

# Force copy is specified.
if instance.data.get("forceCopy", False):
mode = COPY

if mode == COPY:
transfers.append((source, destination))
elif mode == HARDLINK:

@@ -1,19 +1,16 @@
import os
import subprocess
import contextlib
import json
import capture_gui
import clique

#
import pype.maya.lib as lib
import pype.api
import avalon.maya

#
from maya import cmds, mel
import pymel.core as pm
from pype.vendor import ffmpeg
# from pype.scripts import otio_burnin
reload(ffmpeg)
# import ffmpeg
# # from pype.scripts import otio_burnin
# reload(ffmpeg)


# TODO: move codec settings to presets

@@ -11,8 +11,8 @@ import pype.api

from maya import cmds
import pymel.core as pm
from pype.vendor import ffmpeg
reload(ffmpeg)
# import ffmpeg
# reload(ffmpeg)

import avalon.maya

@@ -271,20 +271,21 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
for key in environment:
clean_path = ""
self.log.debug("key: {}".format(key))
to_process = environment[key]
self.log.debug("value: {}".format(environment[key]))
to_process = str(environment[key])
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = environment[key]
elif "://" in environment[key]:
clean_path = environment[key]
elif os.pathsep not in to_process:
clean_path = to_process
elif "://" in to_process:
clean_path = to_process
elif os.pathsep not in str(to_process):
try:
path = environment[key]
path = to_process
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
else:
for path in environment[key].split(os.pathsep):
for path in to_process.split(os.pathsep):
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep

pype/plugins/maya/publish/validate_assembly_name.py (new file, 50 lines)
@@ -0,0 +1,50 @@
import pyblish.api
import maya.cmds as cmds
import pype.maya.action


class ValidateAssemblyName(pyblish.api.InstancePlugin):
""" Ensure Assembly name ends with `GRP`

Check if assembly name ends with `_GRP` string.
"""

label = "Validate Assembly Name"
order = pyblish.api.ValidatorOrder
families = ["assembly"]
actions = [pype.maya.action.SelectInvalidAction]
active = False

@classmethod
def get_invalid(cls, instance):
cls.log.info("Checking name of {}".format(instance.name))

content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True

# All children will be included in the extracted export so we also
# validate *all* descendents of the set members and we skip any
# intermediate shapes
descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
content_instance = list(set(content_instance + descendants))
assemblies = cmds.ls(content_instance, assemblies=True, long=True)

invalid = []
for cr in assemblies:
if not cr.endswith('_GRP'):
cls.log.error("{} doesn't end with _GRP".format(cr))
invalid.append(cr)

return invalid

def process(self, instance):

invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Found {} invalid named assembly "
"items".format(len(invalid)))

pype/plugins/maya/publish/validate_model_name.py (new file, 98 lines)
@@ -0,0 +1,98 @@
from maya import cmds
import pyblish.api
import pype.api
import pype.maya.action
import re


class ValidateModelName(pyblish.api.InstancePlugin):
"""Validate name of model

starts with (somename)_###_(materialID)_GEO
materialID must be present in list
padding number doesn't have limit

"""
optional = True
order = pype.api.ValidateContentsOrder
hosts = ["maya"]
families = ["model"]
label = "Model Name"
actions = [pype.maya.action.SelectInvalidAction]
# path to shader names definitions
# TODO: move it to preset file
material_file = None
active = False
regex = '(.*)_(\\d)*_(.*)_(GEO)'

@classmethod
def get_invalid(cls, instance):

# find out if supplied transform is a group or not
def is_group(groupName):
try:
children = cmds.listRelatives(groupName, children=True)
for child in children:
if not cmds.ls(child, transforms=True):
return False
return True
except Exception:
return False

invalid = []
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True

descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []

descendants = cmds.ls(descendants, noIntermediate=True, long=True)
trns = cmds.ls(descendants, long=False, type=('transform'))

# filter out groups
filter = [node for node in trns if not is_group(node)]

# load shader list file as utf-8
shaders = []
if cls.material_file:
shader_file = open(cls.material_file, "r")
shaders = shader_file.readlines()
shader_file.close()

# strip line endings from list
shaders = map(lambda s: s.rstrip(), shaders)

# compile regex for testing names
r = re.compile(cls.regex)

for obj in filter:
m = r.match(obj)
if m is None:
cls.log.error("invalid name on: {}".format(obj))
invalid.append(obj)
else:
# if we have shader files and a shader-named group is in
# the regex, test this group against names in the shader file
if 'shader' in r.groupindex and shaders:
try:
if not m.group('shader') in shaders:
cls.log.error(
"invalid materialID on: {0} ({1})".format(
obj, m.group('shader')))
invalid.append(obj)
except IndexError:
# shader named group doesn't match
cls.log.error(
"shader group doesn't match: {}".format(obj))
invalid.append(obj)

return invalid

def process(self, instance):

invalid = self.get_invalid(instance)

if invalid:
raise RuntimeError("Model naming is invalid. See log.")

pype/plugins/maya/publish/validate_shader_name.py (new file, 78 lines)
@@ -0,0 +1,78 @@
from maya import cmds

import pyblish.api
import pype.api
import pype.maya.action
import re


class ValidateShaderName(pyblish.api.InstancePlugin):
"""Validate shader name assigned.

It should be <assetName>_<*>_SHD

"""
optional = True
active = False
order = pype.api.ValidateContentsOrder
families = ["look"]
hosts = ['maya']
label = 'Validate Shaders Name'
actions = [pype.maya.action.SelectInvalidAction]
regex = r'(?P<asset>.*)_(.*)_SHD'

# The default connections to check
def process(self, instance):

invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Found shapes with invalid shader names "
"assigned: "
"\n{}".format(invalid))

@classmethod
def get_invalid(cls, instance):

invalid = []

# Get all shapes from the instance
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True

descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []

descendants = cmds.ls(descendants, noIntermediate=True, long=True)
shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True)
asset_name = instance.data.get("asset", None)

# Check the number of connected shadingEngines per shape
r = re.compile(cls.regex)
for shape in shapes:
shading_engines = cmds.listConnections(shape,
destination=True,
type="shadingEngine") or []
shaders = cmds.ls(
cmds.listConnections(shading_engines), materials=1
)

for shader in shaders:
m = r.match(shader)
if m is None:
invalid.append(shape)
cls.log.error(
"object {0} has invalid shader name {1}".format(shape,
shader)
)
else:
if 'asset' in r.groupindex:
if m.group('asset') != asset_name:
invalid.append(shape)
cls.log.error(("object {0} has invalid "
"shader name {1}").format(shape,
shader))

return invalid

@@ -1,2 +0,0 @@
# creates backdrop which is published as separate nuke script
# it is versioned by major version

pype/plugins/nuke/create/create_backdrop.py (new file, 16 lines)
@@ -0,0 +1,16 @@
from avalon.nuke.pipeline import Creator


class CreateBackdrop(Creator):
"""Add Publishable Backdrop"""

name = "backdrop"
label = "Backdrop"
family = "group"
icon = "cube"
defaults = ["Main"]

def __init__(self, *args, **kwargs):
super(CreateBackdrop, self).__init__(*args, **kwargs)

return

@@ -17,21 +17,24 @@ class CrateRead(avalon.nuke.Creator):
family = "source"
families = family
icon = "film"
defaults = ["Effect", "Backplate", "Fire", "Smoke"]

def __init__(self, *args, **kwargs):
super(CrateRead, self).__init__(*args, **kwargs)

self.nodes = nuke.selectedNodes()
data = OrderedDict()
data['family'] = self.family
data['families'] = self.families
{data.update({k: v}) for k, v in self.data.items()
if k not in data.keys()}

for k, v in self.data.items():
if k not in data.keys():
data.update({k: v})

self.data = data

def process(self):
self.name = self.data["subset"]

nodes = nuke.selectedNodes()
nodes = self.nodes

if not nodes or len(nodes) == 0:
nuke.message('Please select Read node')

@@ -40,9 +43,8 @@ class CrateRead(avalon.nuke.Creator):
for node in nodes:
if node.Class() != 'Read':
continue
name = node["name"].value()
avalon_data = self.data
avalon_data['subset'] = "{}_{}".format(self.family, name)
avalon_data['subset'] = "{}".format(self.name)
self.change_read_node(self.data["subset"], node, avalon_data)
count_reads += 1


@@ -52,4 +54,4 @@ class CrateRead(avalon.nuke.Creator):

def change_read_node(self, name, node, data):
node = avalon.nuke.lib.imprint(node, data)
node['tile_color'].setValue(16711935)
node['tile_color'].setValue(16744935)

@@ -28,8 +28,8 @@ class CreateWriteRender(plugin.PypeCreator):

data = OrderedDict()

data["family"] = self.nClass
data["families"] = self.family
data["family"] = self.family
data["families"] = self.nClass

for k, v in self.data.items():
if k not in data.keys():

@@ -40,7 +40,6 @@ class CreateWriteRender(plugin.PypeCreator):

def process(self):
from pype.nuke import lib as pnlib
reload(pnlib)

inputs = []
outputs = []

@@ -101,7 +100,7 @@ class CreateWriteRender(plugin.PypeCreator):
for output in outputs:
output.setInput(0, write_node)

return True
return write_node

#
# class CreateWritePrerender(avalon.nuke.Creator):

pype/plugins/nuke/load/load_matchmove.py (new file, 24 lines)
@@ -0,0 +1,24 @@
from avalon import api


class MatchmoveLoader(api.Loader):
"""
This will run matchmove script to create track in script.
"""

families = ["matchmove"]
representations = ["py"]
defaults = ["Camera", "Object"]

label = "Run matchmove script"
icon = "empire"
color = "orange"

def load(self, context, name, namespace, data):
if self.fname.lower().endswith(".py"):
exec(open(self.fname).read())

else:
self.log.error("Unsupported script type")

return True

@@ -15,16 +15,17 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
hosts = ["nuke", "nukeassist"]

def process(self, context):

asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})


self.log.debug("asset_data: {}".format(asset_data["data"]))
instances = []
# creating instances per write node

self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
for node in nuke.allNodes():

try:
if node["disable"].value():
continue

@@ -58,15 +59,22 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
instance.append(i)
node.end()

family = avalon_knob_data["families"]
if node["render"].value():
self.log.info("flagged for render")
family = "render.local"
# dealing with local/farm rendering
if node["render_farm"].value():
self.log.info("adding render farm family")
family = "render.farm"
instance.data['transfer'] = False
family = avalon_knob_data["family"]
families = [avalon_knob_data["families"]]

if node.Class() not in "Read":
if node["render"].value():
self.log.info("flagged for render")
add_family = "render.local"
# dealing with local/farm rendering
if node["render_farm"].value():
self.log.info("adding render farm family")
add_family = "render.farm"
instance.data["transfer"] = False
families.append(add_family)
else:
# add family into families
families.insert(0, family)

instance.data.update({
"subset": subset,

@@ -74,8 +82,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
"label": node.name(),
"name": node.name(),
"subset": subset,
"family": avalon_knob_data["family"],
"families": [avalon_knob_data["family"], family],
"family": family,
"families": families,
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish').value(),
"step": 1,

@ -1,25 +1,30 @@
import nuke

import pyblish.api


class CollectWriteLegacy(pyblish.api.ContextPlugin):
class CollectWriteLegacy(pyblish.api.InstancePlugin):
    """Collect legacy write nodes."""

    order = pyblish.api.CollectorOrder
    label = "Collect Write Legacy"
    order = pyblish.api.CollectorOrder + 0.0101
    label = "Collect Write node Legacy"
    hosts = ["nuke", "nukeassist"]

    def process(self, context):
    def process(self, instance):
        self.log.info(instance[:])
        node = instance[0]

        for node in nuke.allNodes():
            if node.Class() != "Write":
                continue
        if node.Class() not in ["Group", "Write"]:
            return

            if "avalon" not in node.knobs().keys():
                continue
        family_knobs = ["ak:family", "avalon:family"]
        test = [k for k in node.knobs().keys() if k in family_knobs]
        self.log.info(test)

            instance = context.create_instance(
                node.name(), family="write.legacy"
            )
            instance.append(node)
        if len(test) == 1:
            if "render" in node[test[0]].value():
                self.log.info("render")
                return

        instance.data.update(
            {"family": "write.legacy",
             "families": []}
        )
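The switch from ContextPlugin to InstancePlugin changes the plugin's contract: pyblish calls process() once per already-collected instance rather than once per publish, so the nuke.allNodes() scan becomes a simple guard on instance[0]. A stripped-down sketch of the new shape, with an illustrative class name and no logging:

import pyblish.api

class CollectWriteLegacySketch(pyblish.api.InstancePlugin):
    """Skeleton of an InstancePlugin collector for legacy writes."""

    order = pyblish.api.CollectorOrder + 0.0101
    label = "Collect Write node Legacy"
    hosts = ["nuke", "nukeassist"]

    def process(self, instance):
        node = instance[0]
        # only Group/Write nodes qualify as legacy writes
        if node.Class() not in ["Group", "Write"]:
            return
        instance.data.update({"family": "write.legacy", "families": []})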
@ -1,112 +1,132 @@
import os
import re
import clique
import nuke
import pyblish.api
import logging
from avalon import io, api

log = logging.get_logger(__name__)


@pyblish.api.log
class CollectNukeReads(pyblish.api.ContextPlugin):
class CollectNukeReads(pyblish.api.InstancePlugin):
    """Collect all read nodes."""

    order = pyblish.api.CollectorOrder + 0.1
    order = pyblish.api.CollectorOrder + 0.04
    label = "Collect Reads"
    hosts = ["nuke"]
    hosts = ["nuke", "nukeassist"]
    families = ["source"]

    def process(self, context):
    def process(self, instance):
        asset_data = io.find_one({"type": "asset",
                                  "name": api.Session["AVALON_ASSET"]})

        self.log.debug("asset_data: {}".format(asset_data["data"]))
        for instance in context.data["instances"]:
            self.log.debug("checking instance: {}".format(instance))

            node = instance[0]
            if node.Class() != "Read":
                continue
        self.log.debug("checking instance: {}".format(instance))

            file_path = node["file"].value()
            file_name = os.path.basename(file_path)
            items = file_name.split(".")
        node = instance[0]
        if node.Class() != "Read":
            return

            if len(items) < 2:
                raise ValueError
        file_path = node["file"].value()
        file_name = os.path.basename(file_path)
        items = file_name.split(".")

            ext = items[-1]
        if len(items) < 2:
            raise ValueError

            # # Get frame range
            first_frame = node['first'].value()
            last_frame = node['last'].value()
        ext = items[-1]

            # # Easier way to sequence - Not tested
            # isSequence = True
            # if first_frame == last_frame:
            #     isSequence = False
        # Get frame range
        handle_start = instance.context.data["handleStart"]
        handle_end = instance.context.data["handleEnd"]
        first_frame = node['first'].value()
        last_frame = node['last'].value()

            isSequence = False
            if len(items) > 1:
                sequence = items[-2]
                hash_regex = re.compile(r'([#*])')
                seq_regex = re.compile('[%0-9*d]')
                hash_match = re.match(hash_regex, sequence)
                seq_match = re.match(seq_regex, sequence)
                if hash_match or seq_match:
                    isSequence = True
        # colorspace
        colorspace = node["colorspace"].value()
        if "default" in colorspace:
            colorspace = colorspace.replace("default (", "").replace(")", "")

            # get source path
            path = nuke.filename(node)
            source_dir = os.path.dirname(path)
            self.log.debug('source dir: {}'.format(source_dir))
        # # Easier way to sequence - Not tested
        # isSequence = True
        # if first_frame == last_frame:
        #     isSequence = False

            if isSequence:
                source_files = os.listdir(source_dir)
            else:
                source_files = file_name
        isSequence = False
        if len(items) > 1:
            sequence = items[-2]
            hash_regex = re.compile(r'([#*])')
            seq_regex = re.compile(r'[%0-9*d]')
            hash_match = re.match(hash_regex, sequence)
            seq_match = re.match(seq_regex, sequence)
            if hash_match or seq_match:
                isSequence = True

            # Include start and end render frame in label
            name = node.name()
            label = "{0} ({1}-{2})".format(
                name,
                int(first_frame),
                int(last_frame)
            )
        # get source path
        path = nuke.filename(node)
        source_dir = os.path.dirname(path)
        self.log.debug('source dir: {}'.format(source_dir))

            self.log.debug("collected_frames: {}".format(label))
        if isSequence:
            source_files = [f for f in os.listdir(source_dir)
                            if ext in f
                            if items[0] in f]
        else:
            source_files = file_name

            if "representations" not in instance.data:
                instance.data["representations"] = []
        # Include start and end render frame in label
        name = node.name()
        label = "{0} ({1}-{2})".format(
            name,
            int(first_frame),
            int(last_frame)
        )

            representation = {
                'name': ext,
                'ext': "." + ext,
                'files': source_files,
                "stagingDir": source_dir,
            }
            instance.data["representations"].append(representation)
        self.log.debug("collected_frames: {}".format(label))

            transfer = False
            if "publish" in node.knobs():
                transfer = node["publish"]
        if "representations" not in instance.data:
            instance.data["representations"] = []

            instance.data['transfer'] = transfer
        representation = {
            'name': ext,
            'ext': ext,
            'files': source_files,
            "stagingDir": source_dir,
            "frameStart": "%0{}d".format(
                len(str(last_frame))) % first_frame
        }
        instance.data["representations"].append(representation)

            self.log.debug("checking for error: {}".format(label))
            instance.data.update({
                "path": path,
                "stagingDir": source_dir,
                "ext": ext,
                "label": label,
                "frameStart": first_frame,
                "frameEnd": last_frame,
                "colorspace": node["colorspace"].value(),
                "handles": int(asset_data["data"].get("handles", 0)),
                "step": 1,
                "fps": int(nuke.root()['fps'].value())
            })
        transfer = False
        if "publish" in node.knobs():
            transfer = node["publish"]

            self.log.debug("instance.data: {}".format(instance.data))
        instance.data['transfer'] = transfer

        self.log.debug("context: {}".format(context))
        # Add version data to instance
        version_data = {
            "handles": handle_start,
            "handleStart": handle_start,
            "handleEnd": handle_end,
            "frameStart": first_frame + handle_start,
            "frameEnd": last_frame - handle_end,
            "colorspace": colorspace,
            "families": [instance.data["family"]],
            "subset": instance.data["subset"],
            "fps": instance.context.data["fps"]
        }

        instance.data.update({
            "versionData": version_data,
            "path": path,
            "stagingDir": source_dir,
            "ext": ext,
            "label": label,
            "frameStart": first_frame,
            "frameEnd": last_frame,
            "colorspace": colorspace,
            "handles": int(asset_data["data"].get("handles", 0)),
            "step": 1,
            "fps": int(nuke.root()['fps'].value())
        })

        self.log.debug("instance.data: {}".format(instance.data))
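Two pieces of the new CollectNukeReads logic are worth isolating: the regex check that decides whether a Read node's file name is a frame sequence, and the printf-style padding used for the representation's "frameStart". A standalone sketch under the assumption that names look like "name.####.ext" or "name.%04d.ext" (both helper names are illustrative):

import re

def is_sequence(file_name):
    """Return True if the token before the extension looks like frame padding."""
    items = file_name.split(".")
    if len(items) <= 1:
        return False
    token = items[-2]
    # "#"/"*" padding, or printf-style tokens such as %04d
    return bool(re.match(r'([#*])', token) or re.match(r'[%0-9*d]', token))

def padded_first_frame(first_frame, last_frame):
    """Pad the first frame to the width of the last frame, e.g. 1 -> "0001"."""
    return "%0{}d".format(len(str(last_frame))) % first_frame

So is_sequence("plate.%04d.exr") is True, is_sequence("plate.exr") is False, and padded_first_frame(1, 1001) gives "0001", matching the "frameStart" expression in the representation above.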
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue