Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Commit 00d9d806ff — Merge branch '2.x/develop' into bugfix/maya_icon_rgb_bug

42 changed files with 1621 additions and 1283 deletions
@@ -18,12 +18,7 @@ def set_scene_settings(settings):
            if (args[0]["frameStart"] && args[0]["frameEnd"])
            {
                var duration = args[0]["frameEnd"] - args[0]["frameStart"] + 1
                if (frame.numberOf() > duration)
                {
                    frame.remove(
                        duration, frame.numberOf() - duration
                    );
                }

                if (frame.numberOf() < duration)
                {
                    frame.insert(
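For context, the Harmony script in the hunk above trims or pads the scene so its frame count matches the published frame range. A minimal Python sketch of the same clamping logic (the `scene` object is a hypothetical stand-in for Harmony's `frame` API):

def clamp_frame_count(scene, frame_start, frame_end):
    """Resize `scene` to exactly the requested frame range.

    `scene` is a hypothetical stand-in for Harmony's `frame` API; it only
    needs number_of(), remove(at, count) and insert(at, count).
    """
    duration = frame_end - frame_start + 1  # inclusive range
    current = scene.number_of()
    if current > duration:
        # Too long: drop the surplus frames from the tail.
        scene.remove(duration, current - duration)
    elif current < duration:
        # Too short: pad the tail with empty frames.
        scene.insert(current, duration - current)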
@@ -1,8 +1,6 @@
-from .io_nonsingleton import DbConnector
from .rest_api import AdobeRestApi, PUBLISH_PATHS

__all__ = [
    "PUBLISH_PATHS",
-    "DbConnector",
    "AdobeRestApi"
]
@@ -1,460 +0,0 @@
"""
Wrapper around interactions with the database

Copy of io module in avalon-core.
- In this case not working as singleton with api.Session!
"""

import os
import time
import errno
import shutil
import logging
import tempfile
import functools
import contextlib

from avalon import schema
from avalon.vendor import requests
from avalon.io import extract_port_from_url

# Third-party dependencies
import pymongo


def auto_reconnect(func):
    """Handling auto reconnect in 3 retry times"""
    @functools.wraps(func)
    def decorated(*args, **kwargs):
        object = args[0]
        for retry in range(3):
            try:
                return func(*args, **kwargs)
            except pymongo.errors.AutoReconnect:
                object.log.error("Reconnecting..")
                time.sleep(0.1)
        else:
            raise

    return decorated


class DbConnector(object):

    log = logging.getLogger(__name__)

    def __init__(self):
        self.Session = {}
        self._mongo_client = None
        self._sentry_client = None
        self._sentry_logging_handler = None
        self._database = None
        self._is_installed = False

    def __getitem__(self, key):
        # gives direct access to collection without setting `active_table`
        return self._database[key]

    def __getattribute__(self, attr):
        # not all methods of PyMongo database are implemented; with this it is
        # possible to use them too
        try:
            return super(DbConnector, self).__getattribute__(attr)
        except AttributeError:
            cur_proj = self.Session["AVALON_PROJECT"]
            return self._database[cur_proj].__getattribute__(attr)

    def install(self):
        """Establish a persistent connection to the database"""
        if self._is_installed:
            return

        logging.basicConfig()
        self.Session.update(self._from_environment())

        timeout = int(self.Session["AVALON_TIMEOUT"])
        mongo_url = self.Session["AVALON_MONGO"]
        kwargs = {
            "host": mongo_url,
            "serverSelectionTimeoutMS": timeout
        }

        port = extract_port_from_url(mongo_url)
        if port is not None:
            kwargs["port"] = int(port)

        self._mongo_client = pymongo.MongoClient(**kwargs)

        for retry in range(3):
            try:
                t1 = time.time()
                self._mongo_client.server_info()

            except Exception:
                self.log.error("Retrying..")
                time.sleep(1)
                timeout *= 1.5

            else:
                break

        else:
            raise IOError(
                "ERROR: Couldn't connect to %s in "
                "less than %.3f ms" % (self.Session["AVALON_MONGO"], timeout))

        self.log.info("Connected to %s, delay %.3f s" % (
            self.Session["AVALON_MONGO"], time.time() - t1))

        self._install_sentry()

        self._database = self._mongo_client[self.Session["AVALON_DB"]]
        self._is_installed = True

    def _install_sentry(self):
        if "AVALON_SENTRY" not in self.Session:
            return

        try:
            from raven import Client
            from raven.handlers.logging import SentryHandler
            from raven.conf import setup_logging
        except ImportError:
            # Note: There was a Sentry address in this Session
            return self.log.warning("Sentry disabled, raven not installed")

        client = Client(self.Session["AVALON_SENTRY"])

        # Transmit log messages to Sentry
        handler = SentryHandler(client)
        handler.setLevel(logging.WARNING)

        setup_logging(handler)

        self._sentry_client = client
        self._sentry_logging_handler = handler
        self.log.info(
            "Connected to Sentry @ %s" % self.Session["AVALON_SENTRY"]
        )

    def _from_environment(self):
        Session = {
            item[0]: os.getenv(item[0], item[1])
            for item in (
                # Root directory of projects on disk
                ("AVALON_PROJECTS", None),

                # Name of current Project
                ("AVALON_PROJECT", ""),

                # Name of current Asset
                ("AVALON_ASSET", ""),

                # Name of current silo
                ("AVALON_SILO", ""),

                # Name of current task
                ("AVALON_TASK", None),

                # Name of current app
                ("AVALON_APP", None),

                # Path to working directory
                ("AVALON_WORKDIR", None),

                # Name of current Config
                # TODO(marcus): Establish a suitable default config
                ("AVALON_CONFIG", "no_config"),

                # Name of Avalon in graphical user interfaces
                # Use this to customise the visual appearance of Avalon
                # to better integrate with your surrounding pipeline
                ("AVALON_LABEL", "Avalon"),

                # Used during any connections to the outside world
                ("AVALON_TIMEOUT", "1000"),

                # Address to Asset Database
                ("AVALON_MONGO", "mongodb://localhost:27017"),

                # Name of database used in MongoDB
                ("AVALON_DB", "avalon"),

                # Address to Sentry
                ("AVALON_SENTRY", None),

                # Address to Deadline Web Service
                # E.g. http://192.167.0.1:8082
                ("AVALON_DEADLINE", None),

                # Enable features not necessarily stable. The user's own risk
                ("AVALON_EARLY_ADOPTER", None),

                # Address of central asset repository, contains
                # the following interface:
                #   /upload
                #   /download
                #   /manager (optional)
                ("AVALON_LOCATION", "http://127.0.0.1"),

                # Boolean of whether to upload published material
                # to central asset repository
                ("AVALON_UPLOAD", None),

                # Generic username and password
                ("AVALON_USERNAME", "avalon"),
                ("AVALON_PASSWORD", "secret"),

                # Unique identifier for instances in working files
                ("AVALON_INSTANCE_ID", "avalon.instance"),
                ("AVALON_CONTAINER_ID", "avalon.container"),

                # Enable debugging
                ("AVALON_DEBUG", None),

            ) if os.getenv(item[0], item[1]) is not None
        }

        Session["schema"] = "avalon-core:session-2.0"
        try:
            schema.validate(Session)
        except schema.ValidationError as e:
            # TODO(marcus): Make this mandatory
            self.log.warning(e)

        return Session

    def uninstall(self):
        """Close any connection to the database"""
        try:
            self._mongo_client.close()
        except AttributeError:
            pass

        self._mongo_client = None
        self._database = None
        self._is_installed = False

    def active_project(self):
        """Return the name of the active project"""
        return self.Session["AVALON_PROJECT"]

    def activate_project(self, project_name):
        self.Session["AVALON_PROJECT"] = project_name

    def projects(self):
        """List available projects

        Returns:
            list of project documents

        """

        collection_names = self.collections()
        for project in collection_names:
            if project in ("system.indexes",):
                continue

            # Each collection will have exactly one project document
            document = self.find_project(project)

            if document is not None:
                yield document

    def locate(self, path):
        """Traverse a hierarchy from top-to-bottom

        Example:
            representation = locate(["hulk", "Bruce", "modelDefault", 1, "ma"])

        Returns:
            representation (ObjectId)

        """

        components = zip(
            ("project", "asset", "subset", "version", "representation"),
            path
        )

        parent = None
        for type_, name in components:
            latest = (type_ == "version") and name in (None, -1)

            try:
                if latest:
                    parent = self.find_one(
                        filter={
                            "type": type_,
                            "parent": parent
                        },
                        projection={"_id": 1},
                        sort=[("name", -1)]
                    )["_id"]
                else:
                    parent = self.find_one(
                        filter={
                            "type": type_,
                            "name": name,
                            "parent": parent
                        },
                        projection={"_id": 1},
                    )["_id"]

            except TypeError:
                return None

        return parent

    @auto_reconnect
    def collections(self):
        return self._database.collection_names()

    @auto_reconnect
    def find_project(self, project):
        return self._database[project].find_one({"type": "project"})

    @auto_reconnect
    def insert_one(self, item):
        assert isinstance(item, dict), "item must be of type <dict>"
        schema.validate(item)
        return self._database[self.Session["AVALON_PROJECT"]].insert_one(item)

    @auto_reconnect
    def insert_many(self, items, ordered=True):
        # check if all items are valid
        assert isinstance(items, list), "`items` must be of type <list>"
        for item in items:
            assert isinstance(item, dict), "`item` must be of type <dict>"
            schema.validate(item)

        return self._database[self.Session["AVALON_PROJECT"]].insert_many(
            items,
            ordered=ordered)

    @auto_reconnect
    def find(self, filter, projection=None, sort=None):
        return self._database[self.Session["AVALON_PROJECT"]].find(
            filter=filter,
            projection=projection,
            sort=sort
        )

    @auto_reconnect
    def find_one(self, filter, projection=None, sort=None):
        assert isinstance(filter, dict), "filter must be <dict>"

        return self._database[self.Session["AVALON_PROJECT"]].find_one(
            filter=filter,
            projection=projection,
            sort=sort
        )

    @auto_reconnect
    def save(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].save(
            *args, **kwargs)

    @auto_reconnect
    def replace_one(self, filter, replacement):
        return self._database[self.Session["AVALON_PROJECT"]].replace_one(
            filter, replacement)

    @auto_reconnect
    def update_many(self, filter, update):
        return self._database[self.Session["AVALON_PROJECT"]].update_many(
            filter, update)

    @auto_reconnect
    def distinct(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].distinct(
            *args, **kwargs)

    @auto_reconnect
    def drop(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].drop(
            *args, **kwargs)

    @auto_reconnect
    def delete_many(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].delete_many(
            *args, **kwargs)

    def parenthood(self, document):
        assert document is not None, "This is a bug"

        parents = list()

        while document.get("parent") is not None:
            document = self.find_one({"_id": document["parent"]})

            if document is None:
                break

            if document.get("type") == "master_version":
                _document = self.find_one({"_id": document["version_id"]})
                document["data"] = _document["data"]

            parents.append(document)

        return parents

    @contextlib.contextmanager
    def tempdir(self):
        tempdir = tempfile.mkdtemp()
        try:
            yield tempdir
        finally:
            shutil.rmtree(tempdir)

    def download(self, src, dst):
        """Download `src` to `dst`

        Arguments:
            src (str): URL to source file
            dst (str): Absolute path to destination file

        Yields tuple (progress, error):
            progress (int): Between 0-100
            error (Exception): Any exception raised when first making connection

        """

        try:
            response = requests.get(
                src,
                stream=True,
                auth=requests.auth.HTTPBasicAuth(
                    self.Session["AVALON_USERNAME"],
                    self.Session["AVALON_PASSWORD"]
                )
            )
        except requests.ConnectionError as e:
            yield None, e
            return

        with self.tempdir() as dirname:
            tmp = os.path.join(dirname, os.path.basename(src))

            with open(tmp, "wb") as f:
                total_length = response.headers.get("content-length")

                if total_length is None:  # no content length header
                    f.write(response.content)
                else:
                    downloaded = 0
                    total_length = int(total_length)
                    for data in response.iter_content(chunk_size=4096):
                        downloaded += len(data)
                        f.write(data)

                        yield int(100.0 * downloaded / total_length), None

            try:
                os.makedirs(os.path.dirname(dst))
            except OSError as e:
                # An already existing destination directory is fine.
                if e.errno != errno.EEXIST:
                    raise

            shutil.copy(tmp, dst)
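The rest of this commit swaps every use of the deleted DbConnector for avalon.api.AvalonMongoDB, which exposes a compatible surface (install(), uninstall(), Session, find(), find_one(), ...) while sharing connection handling with avalon-core itself. A hedged sketch of the migration pattern repeated throughout the hunks below (the project name is hypothetical; AvalonMongoDB internals belong to avalon-core):

from avalon.api import AvalonMongoDB

dbcon = AvalonMongoDB()
dbcon.install()                                  # connect once, lazily
dbcon.Session["AVALON_PROJECT"] = "my_project"   # hypothetical project name
project_doc = dbcon.find_one({"type": "project"})
dbcon.uninstall()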
@@ -2,7 +2,7 @@ import os
import sys
import copy
from pype.modules.rest_api import RestApi, route, abort, CallbackResult
-from .io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB
from pype.api import config, execute, Logger

log = Logger().get_logger("AdobeCommunicator")
@@ -14,7 +14,7 @@ PUBLISH_PATHS = []


class AdobeRestApi(RestApi):
-    dbcon = DbConnector()
+    dbcon = AvalonMongoDB()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
@@ -4,14 +4,14 @@ import json
import bson
import bson.json_util
from pype.modules.rest_api import RestApi, abort, CallbackResult
-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB


class AvalonRestApi(RestApi):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
-        self.dbcon = DbConnector()
+        self.dbcon = AvalonMongoDB()
        self.dbcon.install()

    @RestApi.route("/projects/<project_name>", url_prefix="/avalon", methods="GET")
@@ -5,7 +5,7 @@ from queue import Queue

from bson.objectid import ObjectId
from pype.modules.ftrack.lib import BaseAction, statics_icon
-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB


class DeleteAssetSubset(BaseAction):

@@ -21,7 +21,7 @@ class DeleteAssetSubset(BaseAction):
    #: roles that are allowed to register this action
    role_list = ["Pypeclub", "Administrator", "Project Manager"]
    #: Db connection
-    dbcon = DbConnector()
+    dbcon = AvalonMongoDB()

    splitter = {"type": "label", "value": "---"}
    action_data_by_id = {}
@@ -6,7 +6,7 @@ import clique
from pymongo import UpdateOne

from pype.modules.ftrack.lib import BaseAction, statics_icon
-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB
from pype.api import Anatomy

import avalon.pipeline

@@ -24,7 +24,7 @@ class DeleteOldVersions(BaseAction):
    role_list = ["Pypeclub", "Project Manager", "Administrator"]
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")

-    dbcon = DbConnector()
+    dbcon = AvalonMongoDB()

    inteface_title = "Choose your preferences"
    splitter_item = {"type": "label", "value": "---"}
@@ -1,5 +1,6 @@
import os
+import copy
import json
import shutil
import collections

@@ -9,10 +10,10 @@ from bson.objectid import ObjectId
from avalon import pipeline
from avalon.vendor import filelink

-from pype.api import Anatomy
+from pype.api import Anatomy, config
from pype.modules.ftrack.lib import BaseAction, statics_icon
from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB


class Delivery(BaseAction):

@@ -23,7 +24,7 @@ class Delivery(BaseAction):
    role_list = ["Pypeclub", "Administrator", "Project manager"]
    icon = statics_icon("ftrack", "action_icons", "Delivery.svg")

-    db_con = DbConnector()
+    db_con = AvalonMongoDB()

    def discover(self, session, entities, event):
        for entity in entities:
@@ -41,36 +42,22 @@ class Delivery(BaseAction):
        items = []
        item_splitter = {"type": "label", "value": "---"}

-        # Prepare component names for processing
-        components = None
-        project = None
-        for entity in entities:
-            if project is None:
-                project_id = None
-                for ent_info in entity["link"]:
-                    if ent_info["type"].lower() == "project":
-                        project_id = ent_info["id"]
-                        break
+        project_entity = self.get_project_from_entity(entities[0])
+        project_name = project_entity["full_name"]
+        self.db_con.install()
+        self.db_con.Session["AVALON_PROJECT"] = project_name
+        project_doc = self.db_con.find_one({"type": "project"})
+        if not project_doc:
+            return {
+                "success": False,
+                "message": (
+                    "Didn't found project \"{}\" in avalon."
+                ).format(project_name)
+            }

-                if project_id is None:
-                    project = entity["asset"]["parent"]["project"]
-                else:
-                    project = session.query((
-                        "select id, full_name from Project where id is \"{}\""
-                    ).format(project_id)).one()
+        repre_names = self._get_repre_names(entities)
+        self.db_con.uninstall()

-            _components = set(
-                [component["name"] for component in entity["components"]]
-            )
-            if components is None:
-                components = _components
-                continue
-
-            components = components.intersection(_components)
-            if not components:
-                break
-
-        project_name = project["full_name"]
        items.append({
            "type": "hidden",
            "name": "__project_name__",
@@ -93,7 +80,7 @@ class Delivery(BaseAction):

        skipped = False
        # Add message if there are any common components
-        if not components or not new_anatomies:
+        if not repre_names or not new_anatomies:
            skipped = True
            items.append({
                "type": "label",
@@ -106,7 +93,7 @@ class Delivery(BaseAction):
            "value": skipped
        })

-        if not components:
+        if not repre_names:
            if len(entities) == 1:
                items.append({
                    "type": "label",
@@ -143,12 +130,12 @@ class Delivery(BaseAction):
            "type": "label"
        })

-        for component in components:
+        for repre_name in repre_names:
            items.append({
                "type": "boolean",
                "value": False,
-                "label": component,
-                "name": component
+                "label": repre_name,
+                "name": repre_name
            })

        items.append(item_splitter)
@@ -198,27 +185,233 @@ class Delivery(BaseAction):
            "title": title
        }

    def _get_repre_names(self, entities):
        version_ids = self._get_interest_version_ids(entities)
        repre_docs = self.db_con.find({
            "type": "representation",
            "parent": {"$in": version_ids}
        })
        return list(sorted(repre_docs.distinct("name")))

    def _get_interest_version_ids(self, entities):
        parent_ent_by_id = {}
        subset_names = set()
        version_nums = set()
        for entity in entities:
            asset = entity["asset"]
            parent = asset["parent"]
            parent_ent_by_id[parent["id"]] = parent

            subset_name = asset["name"]
            subset_names.add(subset_name)

            version = entity["version"]
            version_nums.add(version)

        asset_docs_by_ftrack_id = self._get_asset_docs(parent_ent_by_id)
        subset_docs = self._get_subset_docs(
            asset_docs_by_ftrack_id, subset_names, entities
        )
        version_docs = self._get_version_docs(
            asset_docs_by_ftrack_id, subset_docs, version_nums, entities
        )

        return [version_doc["_id"] for version_doc in version_docs]

    def _get_version_docs(
        self, asset_docs_by_ftrack_id, subset_docs, version_nums, entities
    ):
        subset_docs_by_id = {
            subset_doc["_id"]: subset_doc
            for subset_doc in subset_docs
        }
        version_docs = list(self.db_con.find({
            "type": "version",
            "parent": {"$in": list(subset_docs_by_id.keys())},
            "name": {"$in": list(version_nums)}
        }))
        version_docs_by_parent_id = collections.defaultdict(dict)
        for version_doc in version_docs:
            subset_doc = subset_docs_by_id[version_doc["parent"]]

            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            version = version_doc["name"]
            if version_docs_by_parent_id[asset_id].get(subset_name) is None:
                version_docs_by_parent_id[asset_id][subset_name] = {}

            version_docs_by_parent_id[asset_id][subset_name][version] = (
                version_doc
            )

        filtered_versions = []
        for entity in entities:
            asset = entity["asset"]

            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = version_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            version_docs_by_version = subsets_by_name.get(subset_name)
            if not version_docs_by_version:
                continue

            version = entity["version"]
            version_doc = version_docs_by_version.get(version)
            if version_doc:
                filtered_versions.append(version_doc)
        return filtered_versions

    def _get_subset_docs(
        self, asset_docs_by_ftrack_id, subset_names, entities
    ):
        asset_doc_ids = list()
        for asset_doc in asset_docs_by_ftrack_id.values():
            asset_doc_ids.append(asset_doc["_id"])

        subset_docs = list(self.db_con.find({
            "type": "subset",
            "parent": {"$in": asset_doc_ids},
            "name": {"$in": list(subset_names)}
        }))
        subset_docs_by_parent_id = collections.defaultdict(dict)
        for subset_doc in subset_docs:
            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            subset_docs_by_parent_id[asset_id][subset_name] = subset_doc

        filtered_subsets = []
        for entity in entities:
            asset = entity["asset"]

            parent = asset["parent"]
            asset_doc = asset_docs_by_ftrack_id[parent["id"]]

            subsets_by_name = subset_docs_by_parent_id.get(asset_doc["_id"])
            if not subsets_by_name:
                continue

            subset_name = asset["name"]
            subset_doc = subsets_by_name.get(subset_name)
            if subset_doc:
                filtered_subsets.append(subset_doc)
        return filtered_subsets

    def _get_asset_docs(self, parent_ent_by_id):
        asset_docs = list(self.db_con.find({
            "type": "asset",
            "data.ftrackId": {"$in": list(parent_ent_by_id.keys())}
        }))
        asset_docs_by_ftrack_id = {
            asset_doc["data"]["ftrackId"]: asset_doc
            for asset_doc in asset_docs
        }

        entities_by_mongo_id = {}
        entities_by_names = {}
        for ftrack_id, entity in parent_ent_by_id.items():
            if ftrack_id not in asset_docs_by_ftrack_id:
                parent_mongo_id = entity["custom_attributes"].get(
                    CUST_ATTR_ID_KEY
                )
                if parent_mongo_id:
                    entities_by_mongo_id[ObjectId(parent_mongo_id)] = entity
                else:
                    entities_by_names[entity["name"]] = entity

        expressions = []
        if entities_by_mongo_id:
            expression = {
                "type": "asset",
                "_id": {"$in": list(entities_by_mongo_id.keys())}
            }
            expressions.append(expression)

        if entities_by_names:
            expression = {
                "type": "asset",
                "name": {"$in": list(entities_by_names.keys())}
            }
            expressions.append(expression)

        if expressions:
            if len(expressions) == 1:
                filter = expressions[0]
            else:
                filter = {"$or": expressions}

            asset_docs = self.db_con.find(filter)
            for asset_doc in asset_docs:
                if asset_doc["_id"] in entities_by_mongo_id:
                    entity = entities_by_mongo_id[asset_doc["_id"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

                elif asset_doc["name"] in entities_by_names:
                    entity = entities_by_names[asset_doc["name"]]
                    asset_docs_by_ftrack_id[entity["id"]] = asset_doc

        return asset_docs_by_ftrack_id

    def launch(self, session, entities, event):
        if "values" not in event["data"]:
            return

-        self.report_items = collections.defaultdict(list)

        values = event["data"]["values"]
        skipped = values.pop("__skipped__")
        if skipped:
            return None

-        component_names = []
        user_id = event["source"]["user"]["id"]
        user_entity = session.query(
            "User where id is {}".format(user_id)
        ).one()

        job = session.create("Job", {
            "user": user_entity,
            "status": "running",
            "data": json.dumps({
                "description": "Delivery processing."
            })
        })
        session.commit()

        try:
            self.db_con.install()
            self.real_launch(session, entities, event)
            job["status"] = "done"

        except Exception:
            self.log.warning(
                "Failed during processing delivery action.",
                exc_info=True
            )

        finally:
            if job["status"] != "done":
                job["status"] = "failed"
            session.commit()
            self.db_con.uninstall()

    def real_launch(self, session, entities, event):
        self.log.info("Delivery action just started.")
+        report_items = collections.defaultdict(list)

        values = event["data"]["values"]

        location_path = values.pop("__location_path__")
        anatomy_name = values.pop("__new_anatomies__")
        project_name = values.pop("__project_name__")

+        repre_names = []
        for key, value in values.items():
            if value is True:
-                component_names.append(key)
+                repre_names.append(key)

-        if not component_names:
+        if not repre_names:
            return {
                "success": True,
                "message": "Not selected components to deliver."
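To summarize the new helpers above: ftrack selections are resolved to avalon version documents through three batched queries (assets by ftrackId or name, subsets by parent and name, versions by parent and name), and representation names then come from a single distinct() call. A rough, simplified sketch of that pipeline's shape (hypothetical `selections` structure; pymongo-style cursor API as used by the plugin):

# Hypothetical, simplified: batch-resolve selections to representation names.
def repre_names_for_selection(dbcon, selections):
    asset_ids = [s["asset_id"] for s in selections]
    subset_names = {s["subset"] for s in selections}
    version_nums = {s["version"] for s in selections}

    subsets = dbcon.find({"type": "subset",
                          "parent": {"$in": asset_ids},
                          "name": {"$in": list(subset_names)}})
    subset_ids = [s["_id"] for s in subsets]

    versions = dbcon.find({"type": "version",
                           "parent": {"$in": subset_ids},
                           "name": {"$in": list(version_nums)}})
    version_ids = [v["_id"] for v in versions]

    repres = dbcon.find({"type": "representation",
                         "parent": {"$in": version_ids}})
    return sorted(repres.distinct("name"))

Three round trips regardless of how many entities were selected, instead of the old per-entity query loop.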
@@ -230,64 +423,15 @@ class Delivery(BaseAction):
        if not os.path.exists(location_path):
            os.makedirs(location_path)

-        self.db_con.install()
-        self.db_con.Session["AVALON_PROJECT"] = project_name
-
-        repres_to_deliver = []
-        for entity in entities:
-            asset = entity["asset"]
-            subset_name = asset["name"]
-            version = entity["version"]
-
-            parent = asset["parent"]
-            parent_mongo_id = parent["custom_attributes"].get(CUST_ATTR_ID_KEY)
-            if parent_mongo_id:
-                parent_mongo_id = ObjectId(parent_mongo_id)
-            else:
-                asset_ent = self.db_con.find_one({
-                    "type": "asset",
-                    "data.ftrackId": parent["id"]
-                })
-                if not asset_ent:
-                    ent_path = "/".join(
-                        [ent["name"] for ent in parent["link"]]
-                    )
-                    msg = "Not synchronized entities to avalon"
-                    self.report_items[msg].append(ent_path)
-                    self.log.warning("{} <{}>".format(msg, ent_path))
-                    continue
-
-                parent_mongo_id = asset_ent["_id"]
-
-            subset_ent = self.db_con.find_one({
-                "type": "subset",
-                "parent": parent_mongo_id,
-                "name": subset_name
-            })
-
-            version_ent = self.db_con.find_one({
-                "type": "version",
-                "name": version,
-                "parent": subset_ent["_id"]
-            })
-
-            repre_ents = self.db_con.find({
-                "type": "representation",
-                "parent": version_ent["_id"]
-            })
-
-            repres_by_name = {}
-            for repre in repre_ents:
-                repre_name = repre["name"]
-                repres_by_name[repre_name] = repre
-
-            for component in entity["components"]:
-                comp_name = component["name"]
-                if comp_name not in component_names:
-                    continue
-
-                repre = repres_by_name.get(comp_name)
-                repres_to_deliver.append(repre)
+        self.log.debug("Collecting representations to process.")
+        version_ids = self._get_interest_version_ids(entities)
+        repres_to_deliver = list(self.db_con.find({
+            "type": "representation",
+            "parent": {"$in": version_ids},
+            "name": {"$in": repre_names}
+        }))

        anatomy = Anatomy(project_name)
@@ -304,9 +448,17 @@ class Delivery(BaseAction):
        for name in root_names:
            format_dict["root"][name] = location_path

+        datetime_data = config.get_datetime_data()
        for repre in repres_to_deliver:
+            source_path = repre.get("data", {}).get("path")
+            debug_msg = "Processing representation {}".format(repre["_id"])
+            if source_path:
+                debug_msg += " with published path {}.".format(source_path)
+            self.log.debug(debug_msg)
+
            # Get destination repre path
            anatomy_data = copy.deepcopy(repre["context"])
+            anatomy_data.update(datetime_data)
            anatomy_filled = anatomy.format_all(anatomy_data)
            test_path = anatomy_filled["delivery"][anatomy_name]
@@ -333,7 +485,7 @@ class Delivery(BaseAction):
                    "- Invalid value DataType: \"{}\"<br>"
                ).format(str(repre["_id"]), keys)

-                self.report_items[msg].append(sub_msg)
+                report_items[msg].append(sub_msg)
                self.log.warning(
                    "{} Representation: \"{}\" Filled: <{}>".format(
                        msg, str(repre["_id"]), str(test_path)
@@ -355,20 +507,19 @@ class Delivery(BaseAction):
                anatomy,
                anatomy_name,
                anatomy_data,
-                format_dict
+                format_dict,
+                report_items
            )

            if not frame:
                self.process_single_file(*args)
            else:
                self.process_sequence(*args)

-        self.db_con.uninstall()
-
-        return self.report()
+        return self.report(report_items)

    def process_single_file(
-        self, repre_path, anatomy, anatomy_name, anatomy_data, format_dict
+        self, repre_path, anatomy, anatomy_name, anatomy_data, format_dict,
+        report_items
    ):
        anatomy_filled = anatomy.format(anatomy_data)
        if format_dict:
@@ -384,7 +535,8 @@ class Delivery(BaseAction):
        self.copy_file(repre_path, delivery_path)

    def process_sequence(
-        self, repre_path, anatomy, anatomy_name, anatomy_data, format_dict
+        self, repre_path, anatomy, anatomy_name, anatomy_data, format_dict,
+        report_items
    ):
        dir_path, file_name = os.path.split(str(repre_path))
@@ -398,7 +550,7 @@ class Delivery(BaseAction):

        if not file_name_items:
            msg = "Source file was not found"
-            self.report_items[msg].append(repre_path)
+            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return
@@ -418,7 +570,7 @@ class Delivery(BaseAction):
        if src_collection is None:
            # TODO log error!
            msg = "Source collection of files was not found"
-            self.report_items[msg].append(repre_path)
+            report_items[msg].append(repre_path)
            self.log.warning("{} <{}>".format(msg, repre_path))
            return
@@ -491,10 +643,10 @@ class Delivery(BaseAction):
        except OSError:
            shutil.copyfile(src_path, dst_path)

-    def report(self):
+    def report(self, report_items):
        items = []
        title = "Delivery report"
-        for msg, _items in self.report_items.items():
+        for msg, _items in report_items.items():
            if not _items:
                continue
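Note the pattern running through these Delivery hunks: the mutable self.report_items attribute becomes a local report_items dict threaded through the call chain. A single BaseAction instance serves many ftrack events, so instance-level report state can bleed between overlapping launches, while a local passed explicitly cannot. A minimal illustration of the hazard (hypothetical Action class):

import collections

class Action:
    def launch(self, msg):
        # Instance attribute: overlapping launches on the same action
        # instance share and clobber this state.
        self.report_items = collections.defaultdict(list)
        self.report_items["errors"].append(msg)
        return self.report_items

    def launch_local(self, msg):
        # Local dict passed along explicitly: each launch owns its report.
        report_items = collections.defaultdict(list)
        report_items["errors"].append(msg)
        return report_items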
@@ -6,7 +6,7 @@ import json
from bson.objectid import ObjectId
from pype.modules.ftrack.lib import BaseAction, statics_icon
from pype.api import Anatomy
-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB

from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY

@@ -25,7 +25,7 @@ class StoreThumbnailsToAvalon(BaseAction):
    icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")

    thumbnail_key = "AVALON_THUMBNAIL_ROOT"
-    db_con = DbConnector()
+    db_con = AvalonMongoDB()

    def discover(self, session, entities, event):
        for entity in entities:
@@ -19,12 +19,12 @@ from pype.modules.ftrack.lib.avalon_sync import (
import ftrack_api
from pype.modules.ftrack import BaseEvent

-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB


class SyncToAvalonEvent(BaseEvent):

-    dbcon = DbConnector()
+    dbcon = AvalonMongoDB()

    interest_entTypes = ["show", "task"]
    ignore_ent_types = ["Milestone"]
@@ -4,7 +4,7 @@ import subprocess

from pype.modules.ftrack import BaseEvent
from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB

from bson.objectid import ObjectId

@@ -37,7 +37,7 @@ class UserAssigmentEvent(BaseEvent):
    3) path to publish files of task user was (de)assigned to
    """

-    db_con = DbConnector()
+    db_con = AvalonMongoDB()

    def error(self, *err):
        for e in err:
@@ -5,7 +5,7 @@ import json
import collections
import copy

-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB

import avalon
import avalon.api

@@ -240,7 +240,7 @@ def get_hierarchical_attributes(session, entity, attr_names, attr_defaults={}):


class SyncEntitiesFactory:
-    dbcon = DbConnector()
+    dbcon = AvalonMongoDB()

    project_query = (
        "select full_name, name, custom_attributes"
@@ -1,460 +0,0 @@
[Second deleted file: a verbatim 460-line copy of the io_nonsingleton.py DbConnector module shown in full above.]
@@ -88,8 +88,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
            instance.data["frameEnd"] - instance.data["frameStart"]
        )

-        if not comp.get('fps'):
-            comp['fps'] = instance.context.data['fps']
+        fps = comp.get('fps')
+        if fps is None:
+            fps = instance.data.get(
+                "fps", instance.context.data['fps']
+            )
+
+        comp['fps'] = fps

        location = self.get_ftrack_location(
            'ftrack.server', ft_session
        )
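The new lookup prefers the component's own fps, then the instance's, then the publish context's; the `is None` check also only fills in a genuinely missing value rather than overwriting any falsy one. Sketched standalone with plain dicts:

def resolve_fps(comp, instance_data, context_data):
    # Component fps wins, then instance fps, then context fps.
    fps = comp.get("fps")
    if fps is None:
        fps = instance_data.get("fps", context_data["fps"])
    return fps

assert resolve_fps({"fps": 24}, {"fps": 25}, {"fps": 23.976}) == 24
assert resolve_fps({}, {"fps": 25}, {"fps": 23.976}) == 25
assert resolve_fps({}, {}, {"fps": 23.976}) == 23.976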
@@ -25,7 +25,8 @@ class ExtractBurnin(pype.api.Extractor):
        "shell",
        "nukestudio",
        "premiere",
-        "standalonepublisher"
+        "standalonepublisher",
+        "harmony"
    ]
    optional = True
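The comma added after "standalonepublisher" matters beyond style: without it, Python's implicit string concatenation silently fuses the two adjacent literals into one bogus host name. A quick illustration:

hosts = ["premiere", "standalonepublisher" "harmony"]        # missing comma
assert hosts == ["premiere", "standalonepublisherharmony"]   # silently wrong

hosts = ["premiere", "standalonepublisher", "harmony"]       # intended
assert len(hosts) == 3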
@@ -83,7 +83,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
            for task_name in new_tasks:
                if task_name not in cur_entity_data["tasks"]:
                    cur_entity_data["tasks"].append(task_name)

-            cur_entity_data.update(data)
            data = cur_entity_data
        else:
@@ -81,6 +81,11 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
        jpeg_items.append("-i {}".format(full_input_path))
        # output arguments from presets
        jpeg_items.extend(ffmpeg_args.get("output") or [])

+        # If its a movie file, we just want one frame.
+        if repre["ext"] == "mov":
+            jpeg_items.append("-vframes 1")
+
        # output file
        jpeg_items.append(full_output_path)
@@ -6,6 +6,8 @@ import copy
import clique
import errno
import six
+import re
+import shutil

from pymongo import DeleteOne, InsertOne
import pyblish.api
@@ -952,21 +954,37 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        """
        if integrated_file_sizes:
            for file_url, _file_size in integrated_file_sizes.items():
+                if not os.path.exists(file_url):
+                    self.log.debug(
+                        "File {} was not found.".format(file_url)
+                    )
+                    continue
+
                try:
                    if mode == 'remove':
-                        self.log.debug("Removing file ...{}".format(file_url))
+                        self.log.debug("Removing file {}".format(file_url))
                        os.remove(file_url)
                    if mode == 'finalize':
-                        self.log.debug("Renaming file ...{}".format(file_url))
-                        import re
-                        os.rename(file_url,
-                                  re.sub('\.{}$'.format(self.TMP_FILE_EXT),
-                                         '',
-                                         file_url)
-                                  )
+                        new_name = re.sub(
+                            r'\.{}$'.format(self.TMP_FILE_EXT),
+                            '',
+                            file_url
+                        )

-                except FileNotFoundError:
-                    pass  # file not there, nothing to delete
+                        if os.path.exists(new_name):
+                            self.log.debug(
+                                "Overwriting file {} to {}".format(
+                                    file_url, new_name
+                                )
+                            )
+                            shutil.copy(file_url, new_name)
+                        else:
+                            self.log.debug(
+                                "Renaming file {} to {}".format(
+                                    file_url, new_name
+                                )
+                            )
+                            os.rename(file_url, new_name)
                except OSError:
                    self.log.error("Cannot {} file {}".format(mode, file_url),
                                   exc_info=True)
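The reworked finalize branch strips the temp suffix from a published file and falls back to copying over an existing destination, because os.rename raises on Windows when the target already exists. A hedged standalone sketch of the same move-or-overwrite step ("TFILE" is an assumed suffix; the plugin uses its TMP_FILE_EXT constant):

import os
import re
import shutil

def finalize_tmp_file(file_url, tmp_ext="TFILE"):
    # Strip the trailing temp suffix, e.g. "shot.exr.TFILE" -> "shot.exr".
    new_name = re.sub(r"\.{}$".format(tmp_ext), "", file_url)
    if os.path.exists(new_name):
        # os.rename fails on Windows if the target exists; copy over it.
        shutil.copy(file_url, new_name)
    else:
        os.rename(file_url, new_name)
    return new_name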
@@ -428,7 +428,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                        "to render, don't know what to do "
                        "with them.")
            col = rem[0]
-            _, ext = os.path.splitext(col)
+            ext = os.path.splitext(col)[1].lstrip(".")
        else:
            # but we really expect only one collection.
            # Nothing else make sense.
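The change matters because os.path.splitext keeps the leading dot on the extension, while the downstream code expects a bare extension name. For example:

import os

_, ext = os.path.splitext("beauty.1001.exr")
assert ext == ".exr"                                     # old code: dot kept

ext = os.path.splitext("beauty.1001.exr")[1].lstrip(".")
assert ext == "exr"                                      # new code: bare name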
@@ -10,7 +10,7 @@ class ValidateVersion(pyblish.api.InstancePlugin):
    order = pyblish.api.ValidatorOrder

    label = "Validate Version"
-    hosts = ["nuke", "maya", "blender"]
+    hosts = ["nuke", "maya", "blender", "standalonepublisher"]

    def process(self, instance):
        version = instance.data.get("version")
@@ -31,7 +31,7 @@ func
class ImportAudioLoader(api.Loader):
    """Import audio."""

-    families = ["shot"]
+    families = ["shot", "audio"]
    representations = ["wav"]
    label = "Import Audio"
@@ -230,7 +230,7 @@ class ImageSequenceLoader(api.Loader):
    """Load images

    Stores the imported asset in a container named after the asset.
    """
-    families = ["shot", "render", "image", "plate"]
+    families = ["shot", "render", "image", "plate", "reference"]
    representations = ["jpeg", "png", "jpg"]

    def load(self, context, name=None, namespace=None, data=None):
@@ -4,6 +4,7 @@ import subprocess

import pyblish.api
from avalon import harmony
+import pype.lib

import clique

@@ -43,6 +44,9 @@ class ExtractRender(pyblish.api.InstancePlugin):
        frame_start = result[4]
        frame_end = result[5]
        audio_path = result[6]
+        if audio_path:
+            instance.data["audio"] = [{"filename": audio_path}]
+        instance.data["fps"] = frame_rate

        # Set output path to temp folder.
        path = tempfile.mkdtemp()
@@ -87,17 +91,13 @@ class ExtractRender(pyblish.api.InstancePlugin):
            if len(list(col)) > 1:
                collection = col
        else:
-            # assert len(collections) == 1, (
-            #     "There should only be one image sequence in {}. Found: {}".format(
-            #         path, len(collections)
-            #     )
-            # )
            collection = collections[0]

        # Generate thumbnail.
        thumbnail_path = os.path.join(path, "thumbnail.png")
+        ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
        args = [
-            "ffmpeg", "-y",
+            ffmpeg_path, "-y",
            "-i", os.path.join(path, list(collections[0])[0]),
            "-vf", "scale=300:-1",
            "-vframes", "1",
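Two fixes above: the bundled ffmpeg binary is resolved through pype.lib.get_ffmpeg_tool_path instead of assuming "ffmpeg" is on PATH, and the dead commented-out assertion is dropped. The resulting thumbnail call, sketched standalone (the file paths are hypothetical):

import subprocess
import pype.lib

ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
args = [
    ffmpeg_path, "-y",
    "-i", "/tmp/render/shot.0001.png",   # first frame of the sequence
    "-vf", "scale=300:-1",               # 300 px wide, keep aspect ratio
    "-vframes", "1",                     # single output frame
    "/tmp/render/thumbnail.png",
]
subprocess.run(args, check=True)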
@@ -117,57 +117,17 @@ class ExtractRender(pyblish.api.InstancePlugin):

        self.log.debug(output.decode("utf-8"))

-        # Generate mov.
-        mov_path = os.path.join(path, instance.data["name"] + ".mov")
-        if os.path.isfile(audio_path):
-            args = [
-                "ffmpeg", "-y",
-                "-i", audio_path,
-                "-i",
-                os.path.join(path, collection.head + "%04d" + collection.tail),
-                mov_path
-            ]
-        else:
-            args = [
-                "ffmpeg", "-y",
-                "-i",
-                os.path.join(path, collection.head + "%04d" + collection.tail),
-                mov_path
-            ]
-
-        process = subprocess.Popen(
-            args,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            stdin=subprocess.PIPE
-        )
-
-        output = process.communicate()[0]
-
-        if process.returncode != 0:
-            raise ValueError(output.decode("utf-8"))
-
-        self.log.debug(output.decode("utf-8"))
-
        # Generate representations.
        extension = collection.tail[1:]
        representation = {
            "name": extension,
            "ext": extension,
            "files": list(collection),
-            "stagingDir": path
-        }
-        movie = {
-            "name": "mov",
-            "ext": "mov",
-            "files": os.path.basename(mov_path),
-            "stagingDir": path,
-            "frameStart": frame_start,
-            "frameEnd": frame_end,
-            "fps": frame_rate,
-            "preview": True,
-            "tags": ["review", "ftrackreview"]
-        }
+            "stagingDir": path,
+            "tags": ["review"],
+            "fps": frame_rate
+        }

        thumbnail = {
            "name": "thumbnail",
            "ext": "png",
@@ -175,7 +135,7 @@ class ExtractRender(pyblish.api.InstancePlugin):
            "stagingDir": path,
            "tags": ["thumbnail"]
        }
-        instance.data["representations"] = [representation, movie, thumbnail]
+        instance.data["representations"] = [representation, thumbnail]

        # Required for extract_review plugin (L222 onwards).
        instance.data["frameStart"] = frame_start
@@ -12,6 +12,7 @@ class CreateReview(avalon.maya.Creator):
    icon = "video-camera"
    defaults = ['Main']
    keepImages = False
+    isolate = False

    def __init__(self, *args, **kwargs):
        super(CreateReview, self).__init__(*args, **kwargs)

@@ -22,6 +23,7 @@ class CreateReview(avalon.maya.Creator):
        for key, value in animation_data.items():
            data[key] = value

+        data["isolate"] = self.isolate
        data["keepImages"] = self.keepImages

        self.data = data
@@ -64,6 +64,7 @@ class CollectReview(pyblish.api.InstancePlugin):
         data['handles'] = instance.data.get('handles', None)
         data['step'] = instance.data['step']
         data['fps'] = instance.data['fps']
+        data["isolate"] = instance.data["isolate"]
         cmds.setAttr(str(instance) + '.active', 1)
         self.log.debug('data {}'.format(instance.context[i].data))
         instance.context[i].data.update(data)

@@ -76,6 +76,11 @@ class ExtractPlayblast(pype.api.Extractor):
         pm.currentTime(refreshFrameInt - 1, edit=True)
         pm.currentTime(refreshFrameInt, edit=True)

+        # Isolate view is requested by having objects in the set besides a
+        # camera.
+        if instance.data.get("isolate"):
+            preset["isolate"] = instance.data["setMembers"]
+
         with maintained_time():
             filename = preset.get("filename", "%TEMP%")

@@ -77,6 +77,11 @@ class ExtractThumbnail(pype.api.Extractor):
         pm.currentTime(refreshFrameInt - 1, edit=True)
         pm.currentTime(refreshFrameInt, edit=True)

+        # Isolate view is requested by having objects in the set besides a
+        # camera.
+        if instance.data.get("isolate"):
+            preset["isolate"] = instance.data["setMembers"]
+
         with maintained_time():
             filename = preset.get("filename", "%TEMP%")

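Note: the two Maya extractor hunks above are identical on purpose: playblast and thumbnail capture share the same preset mechanism, and isolate mode simply copies the instance's set members into that preset. The gate in isolation, with made-up dictionary values for illustration:

    preset = {"filename": "%TEMP%"}  # existing capture preset
    instance_data = {
        "isolate": True,                       # from CreateReview / CollectReview
        "setMembers": ["pCube1", "pSphere1"],  # example objectset contents
    }
    if instance_data.get("isolate"):
        preset["isolate"] = instance_data["setMembers"]
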
@@ -25,6 +25,7 @@ import re
 import hashlib
 from datetime import datetime
 import itertools
+from collections import OrderedDict

 import clique
 import requests

@@ -67,7 +68,7 @@ payload_skeleton = {

 def _format_tiles(
         filename, index, tiles_x, tiles_y,
-        width, height, prefix, origin="blc"):
+        width, height, prefix):
     """Generate tile entries for Deadline tile job.

     Returns two dictionaries - one that can be directly used in Deadline

@@ -113,12 +114,14 @@ def _format_tiles(
     """
     tile = 0
     out = {"JobInfo": {}, "PluginInfo": {}}
-    cfg = {}
+    cfg = OrderedDict()
     w_space = width / tiles_x
     h_space = height / tiles_y

+    cfg["TilesCropped"] = "False"
+
     for tile_x in range(1, tiles_x + 1):
-        for tile_y in range(1, tiles_y + 1):
+        for tile_y in reversed(range(1, tiles_y + 1)):
             tile_prefix = "_tile_{}x{}_{}x{}_".format(
                 tile_x, tile_y,
                 tiles_x,

@@ -143,14 +146,13 @@ def _format_tiles(

-            cfg["Tile{}".format(tile)] = new_filename
+            cfg["Tile{}Tile".format(tile)] = new_filename
+            cfg["Tile{}FileName".format(tile)] = new_filename
             cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space
-            if origin == "blc":
-                cfg["Tile{}Y".format(tile)] = (tile_y - 1) * h_space
-            else:
-                cfg["Tile{}Y".format(tile)] = int(height) - ((tile_y - 1) * h_space)  # noqa: E501
+            cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space)

-            cfg["Tile{}Width".format(tile)] = tile_x * w_space
-            cfg["Tile{}Height".format(tile)] = tile_y * h_space
+            cfg["Tile{}Width".format(tile)] = w_space
+            cfg["Tile{}Height".format(tile)] = h_space

             tile += 1
     return out, cfg

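Note on the _format_tiles changes: dropping the origin parameter hard-codes a top-left origin, so the Y offset becomes height minus tile_y * h_space, and each tile now reports its own width/height instead of a running total. A self-contained sketch of the resulting geometry (the function name is hypothetical, not part of the diff):

    def tile_regions(width, height, tiles_x, tiles_y):
        """Yield (x, y, w, h) per tile, with y measured from the top edge."""
        w_space = width // tiles_x
        h_space = height // tiles_y
        for tile_x in range(1, tiles_x + 1):
            for tile_y in reversed(range(1, tiles_y + 1)):
                yield ((tile_x - 1) * w_space,     # x offset from the left
                       height - tile_y * h_space,  # y offset from the top
                       w_space,                    # tile width
                       h_space)                    # tile height

    # e.g. a 200x100 frame split 2x2 yields:
    # (0, 0, 100, 50), (0, 50, 100, 50), (100, 0, 100, 50), (100, 50, 100, 50)
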
@@ -538,7 +540,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
             "AuxFiles": [],
             "JobInfo": {
                 "BatchName": payload["JobInfo"]["BatchName"],
-                "Frames": 0,
+                "Frames": 1,
                 "Name": "{} - Tile Assembly Job".format(
                     payload["JobInfo"]["Name"]),
                 "OutputDirectory0":

@@ -590,7 +592,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
                     payload["JobInfo"]["Name"],
                     frame,
+                    instance.data.get("tilesX") * instance.data.get("tilesY")  # noqa: E501
                 )
             )
             self.log.info(
                 "... preparing job {}".format(
                     new_payload["JobInfo"]["Name"]))

pype/plugins/nuke/load/load_image.py (new file, 233 lines)
@@ -0,0 +1,233 @@
import re
import nuke

from avalon.vendor import qargparse
from avalon import api, io

from pype.hosts.nuke import presets


class LoadImage(api.Loader):
    """Load still image into Nuke"""

    families = [
        "render2d", "source", "plate",
        "render", "prerender", "review",
        "image"
    ]
    representations = ["exr", "dpx", "jpg", "jpeg", "png", "psd"]

    label = "Load Image"
    order = -10
    icon = "image"
    color = "white"

    options = [
        qargparse.Integer(
            "frame_number",
            label="Frame Number",
            default=int(nuke.root()["first_frame"].getValue()),
            min=1,
            max=999999,
            help="Which frame to read from?"
        )
    ]

    def load(self, context, name, namespace, options):
        from avalon.nuke import (
            containerise,
            viewer_update_and_undo_stop
        )
        self.log.info("__ options: `{}`".format(options))
        frame_number = options.get("frame_number", 1)

        version = context['version']
        version_data = version.get("data", {})
        repr_id = context["representation"]["_id"]

        self.log.info("version_data: {}\n".format(version_data))
        self.log.debug(
            "Representation id `{}` ".format(repr_id))

        last = first = int(frame_number)

        # Fallback to asset name when namespace is None
        if namespace is None:
            namespace = context['asset']['name']

        file = self.fname

        if not file:
            repr_id = context["representation"]["_id"]
            self.log.warning(
                "Representation id `{}` is failing to load".format(repr_id))
            return

        file = file.replace("\\", "/")

        repr_cont = context["representation"]["context"]
        frame = repr_cont.get("frame")
        if frame:
            padding = len(frame)
            file = file.replace(
                frame,
                format(frame_number, "0{}".format(padding)))

        read_name = "Read_{0}_{1}_{2}".format(
            repr_cont["asset"],
            repr_cont["subset"],
            repr_cont["representation"])

        # Create the Loader with the filename path set
        with viewer_update_and_undo_stop():
            r = nuke.createNode(
                "Read",
                "name {}".format(read_name))
            r["file"].setValue(file)

            # Set colorspace defined in version data
            colorspace = context["version"]["data"].get("colorspace")
            if colorspace:
                r["colorspace"].setValue(str(colorspace))

            # Load Nuke presets for the Read node's colorspace
            read_clrs_presets = presets.get_colorspace_preset().get(
                "nuke", {}).get("read", {})

            # Check if any colorspace preset for read nodes is matching
            preset_clrsp = next((read_clrs_presets[k]
                                 for k in read_clrs_presets
                                 if bool(re.search(k, file))),
                                None)
            if preset_clrsp is not None:
                r["colorspace"].setValue(str(preset_clrsp))

            r["origfirst"].setValue(first)
            r["first"].setValue(first)
            r["origlast"].setValue(last)
            r["last"].setValue(last)

            # Add additional metadata from the version to the imprinted
            # Avalon knob
            add_keys = ["source", "colorspace", "author", "fps", "version"]

            data_imprint = {
                "frameStart": first,
                "frameEnd": last
            }
            for k in add_keys:
                if k == 'version':
                    data_imprint.update({k: context["version"]['name']})
                else:
                    data_imprint.update(
                        {k: context["version"]['data'].get(k, str(None))})

            data_imprint.update({"objectName": read_name})

            r["tile_color"].setValue(int("0x4ecd25ff", 16))

            return containerise(r,
                                name=name,
                                namespace=namespace,
                                context=context,
                                loader=self.__class__.__name__,
                                data=data_imprint)

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        """Update the Loader's path.

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file, so the frame range knobs are
        re-applied after the switch.
        """

        from avalon.nuke import (
            update_container
        )

        node = nuke.toNode(container["objectName"])
        frame_number = node["first"].value()

        assert node.Class() == "Read", "Must be Read"

        repr_cont = representation["context"]

        file = api.get_representation_path(representation)

        if not file:
            repr_id = representation["_id"]
            self.log.warning(
                "Representation id `{}` is failing to load".format(repr_id))
            return

        file = file.replace("\\", "/")

        frame = repr_cont.get("frame")
        if frame:
            padding = len(frame)
            file = file.replace(
                frame,
                format(frame_number, "0{}".format(padding)))

        # Apply the retargeted path to the node
        node["file"].setValue(file)

        # Get start frame from version data
        version = io.find_one({
            "type": "version",
            "_id": representation["parent"]
        })

        # Get all version names in a list
        versions = io.find({
            "type": "version",
            "parent": version["parent"]
        }).distinct('name')

        max_version = max(versions)

        version_data = version.get("data", {})

        last = first = int(frame_number)

        # Set the global in to the start frame of the sequence
        node["origfirst"].setValue(first)
        node["first"].setValue(first)
        node["origlast"].setValue(last)
        node["last"].setValue(last)

        updated_dict = {}
        updated_dict.update({
            "representation": str(representation["_id"]),
            "frameStart": str(first),
            "frameEnd": str(last),
            "version": str(version.get("name")),
            "colorspace": version_data.get("colorspace"),
            "source": version_data.get("source"),
            "fps": str(version_data.get("fps")),
            "author": version_data.get("author"),
            "outputDir": version_data.get("outputDir"),
        })

        # Change the node color: orange signals an outdated version
        if version.get("name") not in [max_version]:
            node["tile_color"].setValue(int("0xd84f20ff", 16))
        else:
            node["tile_color"].setValue(int("0x4ecd25ff", 16))

        # Update the imprinted representation
        update_container(
            node,
            updated_dict
        )
        self.log.info("updated to version: {}".format(version.get("name")))

    def remove(self, container):

        from avalon.nuke import viewer_update_and_undo_stop

        node = nuke.toNode(container['objectName'])
        assert node.Class() == "Read", "Must be Read"

        with viewer_update_and_undo_stop():
            nuke.delete(node)

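Note: both load() and update() in the new loader retarget the Read node at a single frame with the same trick: the frame token from the representation context is swapped for the requested frame number, zero-padded to the token's width. In isolation, with a made-up path and numbers:

    frame = "0001"            # frame token from the representation context
    frame_number = 25         # frame the artist asked for
    padding = len(frame)      # -> 4
    path = "/plates/sh010.0001.exr"
    path = path.replace(frame, format(frame_number, "0{}".format(padding)))
    # -> "/plates/sh010.0025.exr"
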
@@ -120,12 +120,12 @@ class LoadSequence(api.Loader):
         if "#" not in file:
             frame = repr_cont.get("frame")
             padding = len(frame)
-            file = file.replace(frame, "#"*padding)
+            file = file.replace(frame, "#" * padding)

         read_name = "Read_{0}_{1}_{2}".format(
             repr_cont["asset"],
             repr_cont["subset"],
             repr_cont["representation"])

         # Create the Loader with the filename path set
         with viewer_update_and_undo_stop():

@@ -250,7 +250,7 @@ class LoadSequence(api.Loader):
         if "#" not in file:
             frame = repr_cont.get("frame")
             padding = len(frame)
-            file = file.replace(frame, "#"*padding)
+            file = file.replace(frame, "#" * padding)

         # Get start frame from version data
         version = io.find_one({

@@ -276,10 +276,10 @@ class LoadSequence(api.Loader):
         last = version_data.get("frameEnd")

         if first is None:
-            self.log.warning("Missing start frame for updated version"
-                             "assuming starts at frame 0 for: "
-                             "{} ({})".format(
-                                node['name'].value(), representation))
+            self.log.warning(
+                "Missing start frame for updated version, "
+                "assuming starts at frame 0 for: "
+                "{} ({})".format(node['name'].value(), representation))
             first = 0

         first -= self.handle_start

@@ -15,10 +15,12 @@ class ExtractThumbnail(pype.api.Extractor):
     order = pyblish.api.ExtractorOrder + 0.01
     label = "Extract Thumbnail"

-    families = ["review", "render.farm"]
+    families = ["review"]
     hosts = ["nuke"]

     def process(self, instance):
+        if "render.farm" in instance.data["families"]:
+            return
+
         with anlib.maintained_selection():
             self.log.debug("instance: {}".format(instance))

@@ -123,7 +123,7 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
             "label": subset,
             "name": subset,
             "family": in_data["family"],
-            "version": in_data.get("version", 1),
+            # "version": in_data.get("version", 1),
             "frameStart": in_data.get("representations", [None])[0].get(
                 "frameStart", None
             ),

@@ -9,7 +9,7 @@ class CollectPsdInstances(pyblish.api.InstancePlugin):
     """

     label = "Collect Psd Instances"
-    order = pyblish.api.CollectorOrder + 0.492
+    order = pyblish.api.CollectorOrder + 0.489
     hosts = ["standalonepublisher"]
     families = ["background_batch"]

@@ -34,8 +34,6 @@ class CollectPsdInstances(pyblish.api.InstancePlugin):
         context = instance.context
-        asset_data = instance.data["assetEntity"]
         asset_name = instance.data["asset"]
-        anatomy_data = instance.data["anatomyData"]

         for subset_name, subset_data in self.subsets.items():
             instance_name = f"{asset_name}_{subset_name}"
             task = subset_data.get("task", "background")

@@ -55,16 +53,8 @@ class CollectPsdInstances(pyblish.api.InstancePlugin):

             new_instance.data["label"] = f"{instance_name}"
             new_instance.data["subset"] = subset_name
             new_instance.data["task"] = task
-
-            # fix anatomy data
-            anatomy_data_new = copy.deepcopy(anatomy_data)
-            # updating hierarchy data
-            anatomy_data_new.update({
-                "asset": asset_data["name"],
-                "task": task,
-                "subset": subset_name
-            })
-            new_instance.data["anatomyData"] = anatomy_data_new

             if subset_name in self.unchecked_by_default:
                 new_instance.data["publish"] = False

@@ -526,7 +526,7 @@ def burnins_from_data(

     bit_rate = burnin._streams[0].get("bit_rate")
     if bit_rate:
-        ffmpeg_args.append("--b:v {}".format(bit_rate))
+        ffmpeg_args.append("-b:v {}".format(bit_rate))

     pix_fmt = burnin._streams[0].get("pix_fmt")
     if pix_fmt:

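Note: the burnin fix above corrects the video-bitrate flag: ffmpeg options take a single leading dash, so "--b:v" produced a broken argument while "-b:v" is the real option. Assembled the same way as in the hunk, with made-up input/output names:

    ffmpeg_args = ["-y", "-i", "input.mov"]
    bit_rate = "5000k"  # would normally be probed from the source stream
    if bit_rate:
        ffmpeg_args.append("-b:v {}".format(bit_rate))
    cmd = "ffmpeg {} output.mov".format(" ".join(ffmpeg_args))
    # -> ffmpeg -y -i input.mov -b:v 5000k output.mov
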
@@ -4,7 +4,7 @@ import logging
 from Qt import QtWidgets, QtCore, QtGui
 from avalon import style

-from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB
 from pype.api import resources

 from avalon.tools import lib as tools_lib

@@ -251,7 +251,7 @@ class LauncherWindow(QtWidgets.QDialog):
         self.log = logging.getLogger(
             ".".join([__name__, self.__class__.__name__])
         )
-        self.dbcon = DbConnector()
+        self.dbcon = AvalonMongoDB()

         self.setWindowTitle("Launcher")
         self.setFocusPolicy(QtCore.Qt.StrongFocus)

@@ -1,7 +1,7 @@
 from bson.objectid import ObjectId
 from Qt import QtWidgets, QtCore
 from widgets import AssetWidget, FamilyWidget, ComponentsWidget, ShadowWidget
-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from avalon.api import AvalonMongoDB


 class Window(QtWidgets.QDialog):

@@ -10,7 +10,7 @@ class Window(QtWidgets.QDialog):
     :param parent: Main widget that cares about all GUIs
     :type parent: QtWidgets.QMainWindow
     """
-    _db = DbConnector()
+    _db = AvalonMongoDB()
     _jobs = {}
     valid_family = False
     valid_components = False

@@ -240,7 +240,7 @@ class AssetWidget(QtWidgets.QWidget):
         self.combo_projects.clear()
         if len(projects) > 0:
             self.combo_projects.addItems(projects)
-            self.dbcon.activate_project(projects[0])
+            self.dbcon.Session["AVALON_PROJECT"] = projects[0]

     def on_project_change(self):
         projects = list()

@@ -248,7 +248,7 @@ class AssetWidget(QtWidgets.QWidget):
             projects.append(project['name'])
         project_name = self.combo_projects.currentText()
         if project_name in projects:
-            self.dbcon.activate_project(project_name)
+            self.dbcon.Session["AVALON_PROJECT"] = project_name
         self.refresh()

     def _refresh_model(self):

@@ -1 +1 @@
-__version__ = "2.11.5"
+__version__ = "2.12.0"