Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)
Merged in hotfix/PYPE-501_fix_crashing_bugs (pull request #290)

Hotfix/PYPE-501 fix crashing bugs
Approved-by: Milan Kolar <milan@orbi.tools>

Commit 7dc7fbd345
14 changed files with 460 additions and 16 deletions
@@ -2,7 +2,7 @@ import os
 from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction
-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from pype.ftrack.lib.io_nonsingleton import DbConnector


 class AttributesRemapper(BaseAction):
@@ -275,7 +275,7 @@ class AttributesRemapper(BaseAction):
         message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
         items.append(message)

-        self.show_interface(event, items, title)
+        self.show_interface(items=items, title=title, event=event)

 def register(session, plugins_presets={}):
     '''Register plugin. Called when used as an plugin.'''
@@ -7,7 +7,7 @@ import re
 from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction
 from avalon import lib as avalonlib
-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from pype.ftrack.lib.io_nonsingleton import DbConnector
 from pypeapp import config, Anatomy
@@ -5,7 +5,7 @@ from bson.objectid import ObjectId
 import argparse
 from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction
-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from pype.ftrack.lib.io_nonsingleton import DbConnector


 class DeleteAsset(BaseAction):
@@ -4,7 +4,7 @@ import logging
 import argparse
 from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction
-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from pype.ftrack.lib.io_nonsingleton import DbConnector


 class AssetsRemover(BaseAction):
@@ -7,7 +7,7 @@ import collections

 from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction, lib
-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from pype.ftrack.lib.io_nonsingleton import DbConnector
 from bson.objectid import ObjectId
@@ -8,7 +8,7 @@ import collections
 from pypeapp import config
 from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction, lib
-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from pype.ftrack.lib.io_nonsingleton import DbConnector
 from bson.objectid import ObjectId
@@ -222,7 +222,11 @@ class SyncHierarchicalAttrs(BaseAction):
         session.commit()

         if self.interface_messages:
-            self.show_interface_from_dict(self.interface_messages, event)
+            self.show_interface_from_dict(
+                messages=self.interface_messages,
+                title="something went wrong",
+                event=event
+            )

         return True
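Both interface calls touched by this hotfix (show_interface in AttributesRemapper above and show_interface_from_dict here) switch from positional to keyword arguments. A minimal, self-contained illustration of why that matters; this is not pype code, just a stand-in function with a hypothetical signature showing how positional calls break when a parameter order changes while keyword calls keep working:

# Stand-in example; names and signature are hypothetical, not the real BaseAction API.
def show_interface(items, title, event):
    return "title={!r}, items={!r}, event={!r}".format(title, items, event)


items = [{"type": "label", "value": "<p>done</p>"}]
event = {"source": {"user": "demo"}}

# Fragile: correctness depends on remembering the exact parameter order.
print(show_interface(items, "Report", event))

# Robust: the style the hotfix adopts, immune to parameter reordering.
print(show_interface(items=items, title="Report", event=event))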
@@ -10,7 +10,7 @@ from pype.ftrack import BaseAction, lib
 from pype.vendor.ftrack_api import session as fa_session


-class Sync_To_Avalon(BaseAction):
+class SyncToAvalon(BaseAction):
     '''
     Synchronizing data action - from Ftrack to Avalon DB
@@ -1,7 +1,7 @@
 import os
 import sys

-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from pype.ftrack.lib.io_nonsingleton import DbConnector

 from pype.vendor import ftrack_api
 from pype.ftrack import BaseEvent, lib
@@ -23,7 +23,10 @@ class SyncHierarchicalAttrs(BaseEvent):
             if not keys:
                 continue

-            entity = session.get(ent['entity_type'], ent['entityId'])
+            if not ent['entityType'] in ['task', 'show']:
+                continue
+
+            entity = session.get(self._get_entity_type(ent), ent['entityId'])
             processable.append(ent)
             processable_ent[ent['entityId']] = entity
@@ -1,6 +1,6 @@
 from pype.vendor import ftrack_api
 from pype.ftrack import BaseEvent, lib
-from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from pype.ftrack.lib.io_nonsingleton import DbConnector
 from bson.objectid import ObjectId
 from pypeapp import config
 from pypeapp import Anatomy
@@ -97,7 +97,7 @@ class FtrackServer:
                 msg = 'Loading of file "{}" failed ({})'.format(
                     file, str(e)
                 )
-                log.warning(msg)
+                log.warning(msg, exc_info=e)

         if len(register_functions_dict) < 1:
             raise Exception
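The only behavioural change in this hunk is the exc_info argument, which makes the logger attach the caught exception's traceback to the record instead of discarding it. A small, self-contained illustration using the standard logging module; the file name and the error are made up for the demo:

import logging

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger("ftrack-server-demo")

try:
    raise ImportError("demo: plugin module failed to import")
except Exception as e:
    msg = 'Loading of file "{}" failed ({})'.format("demo_plugin.py", str(e))
    # exc_info=e adds the full traceback to the log output.
    log.warning(msg, exc_info=e)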
pype/ftrack/lib/io_nonsingleton.py (new file, 433 lines):

"""
Wrapper around interactions with the database

Copy of io module in avalon-core.
- In this case not working as singleton with api.Session!
"""

import os
import time
import errno
import shutil
import logging
import tempfile
import functools
import contextlib

from avalon import schema
from avalon.vendor import requests

# Third-party dependencies
import pymongo


def auto_reconnect(func):
    """Handling auto reconnect in 3 retry times"""
    @functools.wraps(func)
    def decorated(*args, **kwargs):
        object = args[0]
        for retry in range(3):
            try:
                return func(*args, **kwargs)
            except pymongo.errors.AutoReconnect:
                object.log.error("Reconnecting..")
                time.sleep(0.1)
        else:
            raise

    return decorated


class DbConnector(object):

    log = logging.getLogger(__name__)

    def __init__(self):
        self.Session = {}
        self._mongo_client = None
        self._sentry_client = None
        self._sentry_logging_handler = None
        self._database = None
        self._is_installed = False

    def install(self):
        """Establish a persistent connection to the database"""
        if self._is_installed:
            return

        logging.basicConfig()
        self.Session.update(self._from_environment())

        timeout = int(self.Session["AVALON_TIMEOUT"])
        self._mongo_client = pymongo.MongoClient(
            self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)

        for retry in range(3):
            try:
                t1 = time.time()
                self._mongo_client.server_info()

            except Exception:
                self.log.error("Retrying..")
                time.sleep(1)
                timeout *= 1.5

            else:
                break

        else:
            raise IOError(
                "ERROR: Couldn't connect to %s in "
                "less than %.3f ms" % (self.Session["AVALON_MONGO"], timeout))

        self.log.info("Connected to %s, delay %.3f s" % (
            self.Session["AVALON_MONGO"], time.time() - t1))

        self._install_sentry()

        self._database = self._mongo_client[self.Session["AVALON_DB"]]
        self._is_installed = True

    def _install_sentry(self):
        if "AVALON_SENTRY" not in self.Session:
            return

        try:
            from raven import Client
            from raven.handlers.logging import SentryHandler
            from raven.conf import setup_logging
        except ImportError:
            # Note: There was a Sentry address in this Session
            return self.log.warning("Sentry disabled, raven not installed")

        client = Client(self.Session["AVALON_SENTRY"])

        # Transmit log messages to Sentry
        handler = SentryHandler(client)
        handler.setLevel(logging.WARNING)

        setup_logging(handler)

        self._sentry_client = client
        self._sentry_logging_handler = handler
        self.log.info(
            "Connected to Sentry @ %s" % self.Session["AVALON_SENTRY"]
        )

    def _from_environment(self):
        Session = {
            item[0]: os.getenv(item[0], item[1])
            for item in (
                # Root directory of projects on disk
                ("AVALON_PROJECTS", None),

                # Name of current Project
                ("AVALON_PROJECT", ""),

                # Name of current Asset
                ("AVALON_ASSET", ""),

                # Name of current silo
                ("AVALON_SILO", ""),

                # Name of current task
                ("AVALON_TASK", None),

                # Name of current app
                ("AVALON_APP", None),

                # Path to working directory
                ("AVALON_WORKDIR", None),

                # Name of current Config
                # TODO(marcus): Establish a suitable default config
                ("AVALON_CONFIG", "no_config"),

                # Name of Avalon in graphical user interfaces
                # Use this to customise the visual appearance of Avalon
                # to better integrate with your surrounding pipeline
                ("AVALON_LABEL", "Avalon"),

                # Used during any connections to the outside world
                ("AVALON_TIMEOUT", "1000"),

                # Address to Asset Database
                ("AVALON_MONGO", "mongodb://localhost:27017"),

                # Name of database used in MongoDB
                ("AVALON_DB", "avalon"),

                # Address to Sentry
                ("AVALON_SENTRY", None),

                # Address to Deadline Web Service
                # E.g. http://192.167.0.1:8082
                ("AVALON_DEADLINE", None),

                # Enable features not necessarily stable. The user's own risk
                ("AVALON_EARLY_ADOPTER", None),

                # Address of central asset repository, contains
                # the following interface:
                # /upload
                # /download
                # /manager (optional)
                ("AVALON_LOCATION", "http://127.0.0.1"),

                # Boolean of whether to upload published material
                # to central asset repository
                ("AVALON_UPLOAD", None),

                # Generic username and password
                ("AVALON_USERNAME", "avalon"),
                ("AVALON_PASSWORD", "secret"),

                # Unique identifier for instances in working files
                ("AVALON_INSTANCE_ID", "avalon.instance"),
                ("AVALON_CONTAINER_ID", "avalon.container"),

                # Enable debugging
                ("AVALON_DEBUG", None),

            ) if os.getenv(item[0], item[1]) is not None
        }

        Session["schema"] = "avalon-core:session-1.0"
        try:
            schema.validate(Session)
        except schema.ValidationError as e:
            # TODO(marcus): Make this mandatory
            self.log.warning(e)

        return Session

    def uninstall(self):
        """Close any connection to the database"""
        try:
            self._mongo_client.close()
        except AttributeError:
            pass

        self._mongo_client = None
        self._database = None
        self._is_installed = False

    def active_project(self):
        """Return the name of the active project"""
        return self.Session["AVALON_PROJECT"]

    def activate_project(self, project_name):
        self.Session["AVALON_PROJECT"] = project_name

    def projects(self):
        """List available projects

        Returns:
            list of project documents

        """

        collection_names = self.collections()
        for project in collection_names:
            if project in ("system.indexes",):
                continue

            # Each collection will have exactly one project document
            document = self.find_project(project)

            if document is not None:
                yield document

    def locate(self, path):
        """Traverse a hierarchy from top-to-bottom

        Example:
            representation = locate(["hulk", "Bruce", "modelDefault", 1, "ma"])

        Returns:
            representation (ObjectId)

        """

        components = zip(
            ("project", "asset", "subset", "version", "representation"),
            path
        )

        parent = None
        for type_, name in components:
            latest = (type_ == "version") and name in (None, -1)

            try:
                if latest:
                    parent = self.find_one(
                        filter={
                            "type": type_,
                            "parent": parent
                        },
                        projection={"_id": 1},
                        sort=[("name", -1)]
                    )["_id"]
                else:
                    parent = self.find_one(
                        filter={
                            "type": type_,
                            "name": name,
                            "parent": parent
                        },
                        projection={"_id": 1},
                    )["_id"]

            except TypeError:
                return None

        return parent

    @auto_reconnect
    def collections(self):
        return self._database.collection_names()

    @auto_reconnect
    def find_project(self, project):
        return self._database[project].find_one({"type": "project"})

    @auto_reconnect
    def insert_one(self, item):
        assert isinstance(item, dict), "item must be of type <dict>"
        schema.validate(item)
        return self._database[self.Session["AVALON_PROJECT"]].insert_one(item)

    @auto_reconnect
    def insert_many(self, items, ordered=True):
        # check if all items are valid
        assert isinstance(items, list), "`items` must be of type <list>"
        for item in items:
            assert isinstance(item, dict), "`item` must be of type <dict>"
            schema.validate(item)

        return self._database[self.Session["AVALON_PROJECT"]].insert_many(
            items,
            ordered=ordered)

    @auto_reconnect
    def find(self, filter, projection=None, sort=None):
        return self._database[self.Session["AVALON_PROJECT"]].find(
            filter=filter,
            projection=projection,
            sort=sort
        )

    @auto_reconnect
    def find_one(self, filter, projection=None, sort=None):
        assert isinstance(filter, dict), "filter must be <dict>"

        return self._database[self.Session["AVALON_PROJECT"]].find_one(
            filter=filter,
            projection=projection,
            sort=sort
        )

    @auto_reconnect
    def save(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].save(
            *args, **kwargs)

    @auto_reconnect
    def replace_one(self, filter, replacement):
        return self._database[self.Session["AVALON_PROJECT"]].replace_one(
            filter, replacement)

    @auto_reconnect
    def update_many(self, filter, update):
        return self._database[self.Session["AVALON_PROJECT"]].update_many(
            filter, update)

    @auto_reconnect
    def distinct(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].distinct(
            *args, **kwargs)

    @auto_reconnect
    def drop(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].drop(
            *args, **kwargs)

    @auto_reconnect
    def delete_many(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].delete_many(
            *args, **kwargs)

    def parenthood(self, document):
        assert document is not None, "This is a bug"

        parents = list()

        while document.get("parent") is not None:
            document = self.find_one({"_id": document["parent"]})

            if document is None:
                break

            parents.append(document)

        return parents

    @contextlib.contextmanager
    def tempdir(self):
        tempdir = tempfile.mkdtemp()
        try:
            yield tempdir
        finally:
            shutil.rmtree(tempdir)

    def download(self, src, dst):
        """Download `src` to `dst`

        Arguments:
            src (str): URL to source file
            dst (str): Absolute path to destination file

        Yields tuple (progress, error):
            progress (int): Between 0-100
            error (Exception): Any exception raised when first making connection

        """

        try:
            response = requests.get(
                src,
                stream=True,
                auth=requests.auth.HTTPBasicAuth(
                    self.Session["AVALON_USERNAME"],
                    self.Session["AVALON_PASSWORD"]
                )
            )
        except requests.ConnectionError as e:
            yield None, e
            return

        with self.tempdir() as dirname:
            tmp = os.path.join(dirname, os.path.basename(src))

            with open(tmp, "wb") as f:
                total_length = response.headers.get("content-length")

                if total_length is None:  # no content length header
                    f.write(response.content)
                else:
                    downloaded = 0
                    total_length = int(total_length)
                    for data in response.iter_content(chunk_size=4096):
                        downloaded += len(data)
                        f.write(data)

                        yield int(100.0 * downloaded / total_length), None

            try:
                os.makedirs(os.path.dirname(dst))
            except OSError as e:
                # An already existing destination directory is fine.
                if e.errno != errno.EEXIST:
                    raise

            shutil.copy(tmp, dst)
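A minimal usage sketch of the vendored DbConnector above, assuming a reachable MongoDB and an existing project collection; the project and asset names are placeholders:

import os

from pype.ftrack.lib.io_nonsingleton import DbConnector

# install() reads its configuration from AVALON_* environment variables.
os.environ.setdefault("AVALON_MONGO", "mongodb://localhost:27017")
os.environ.setdefault("AVALON_DB", "avalon")

db = DbConnector()
db.install()                          # opens its own MongoClient, no shared singleton
db.activate_project("demo_project")   # placeholder project name

# Queries go to the collection named after the active project.
asset = db.find_one({"type": "asset", "name": "some_asset"})
print(asset)

db.uninstall()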
@@ -31,7 +31,9 @@ def _subprocess(args):
     output = proc.communicate()[0]

     if proc.returncode != 0:
+        log.error(output)
         raise ValueError("\"{}\" was not successful: {}".format(args, output))
+    return output


 def get_hierarchy(asset_name=None):
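For context, a hedged reconstruction of the helper around this hunk. Only the lines visible in the diff are known; the process setup (Popen with piped, merged output) is an assumption and the real pype.lib helper may differ:

import logging
import subprocess

log = logging.getLogger(__name__)


def _subprocess(args):
    # Assumed process setup; only the lines below the Popen call appear in the diff.
    proc = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT
    )
    output = proc.communicate()[0]

    if proc.returncode != 0:
        # New in this hotfix: the captured output is logged before raising.
        log.error(output)
        raise ValueError(
            "\"{}\" was not successful: {}".format(args, output)
        )
    return output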
@@ -94,7 +94,8 @@ class ExtractBurnin(pype.api.Extractor):

         args = [executable, scriptpath, json_data]
         self.log.debug("Executing: {}".format(args))
-        pype.api.subprocess(args)
+        output = pype.api.subprocess(args)
+        self.log.debug("Output: {}".format(output))

         repre_update = {
             "files": movieFileBurnin,
@@ -170,8 +170,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
         subprcs_cmd = " ".join(mov_args)

         # run subprocess
-        self.log.debug("{}".format(subprcs_cmd))
-        pype.api.subprocess(subprcs_cmd)
+        self.log.debug("Executing: {}".format(subprcs_cmd))
+        output = pype.api.subprocess(subprcs_cmd)
+        self.log.debug("Output: {}".format(output))

         # create representation data
         repre_new.update({