ftrack-api-python3 dependencies to pype.vendor
parent: 0bdc5d6e71, commit: 776af26cb6
10 changed files with 1899 additions and 0 deletions
pype/vendor/clique/__init__.py (vendored, new file, 292 lines)
@@ -0,0 +1,292 @@
|
||||||
|
# :coding: utf-8
|
||||||
|
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
|
||||||
|
# :license: See LICENSE.txt.
|
||||||
|
|
||||||
|
import re
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from ._version import __version__
|
||||||
|
from .collection import Collection
|
||||||
|
from .error import CollectionError
|
||||||
|
# was changed for ftrack-api
|
||||||
|
from six import string_types
|
||||||
|
|
||||||
|
|
||||||
|
#: Pattern for matching an index with optional padding.
|
||||||
|
DIGITS_PATTERN = '(?P<index>(?P<padding>0*)\d+)'
|
||||||
|
|
||||||
|
#: Common patterns that can be passed to :py:func:`~clique.assemble`.
|
||||||
|
PATTERNS = {
|
||||||
|
'frames': '\.{0}\.\D+\d?$'.format(DIGITS_PATTERN),
|
||||||
|
'versions': 'v{0}'.format(DIGITS_PATTERN)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def split(fname):
|
||||||
|
'''Split *fname* into a (head, index, tail) tuple.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> split('rs_beauty.1000.png')
|
||||||
|
('rs_beauty.', '1000', '.png')
|
||||||
|
>>> split('myRender.0100.png')
|
||||||
|
('myRender.', '0100', '.png')
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
try:
|
||||||
|
collections, _ = assemble([fname], minimum_items=1)
|
||||||
|
except IndexError:
|
||||||
|
raise ValueError("No collection found")
|
||||||
|
else:
|
||||||
|
# Search for indexes starting from end, as opposed to start
|
||||||
|
# E.g. myRender2017.001.png -> myRender2017.%03d.png
|
||||||
|
# As opposed to -> myRender%d.001.png
|
||||||
|
col = collections[-1]
|
||||||
|
idx = list(col.indexes)[0]
|
||||||
|
|
||||||
|
return (col.head,
|
||||||
|
str(idx).zfill(col.padding),
|
||||||
|
col.tail)
|
||||||
|
|
||||||
|
|
||||||
|
def assemble(iterable, patterns=None, minimum_items=2, case_sensitive=True):
|
||||||
|
'''Assemble items in *iterable* into discrete collections.
|
||||||
|
|
||||||
|
*patterns* may be specified as a list of regular expressions to limit
|
||||||
|
the returned collection possibilities. Use this when interested in
|
||||||
|
collections that only match specific patterns. Each pattern must contain
|
||||||
|
the expression from :py:data:`DIGITS_PATTERN` exactly once.
|
||||||
|
|
||||||
|
A selection of common expressions are available in :py:data:`PATTERNS`.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
If a pattern is supplied as a string it will be automatically compiled
|
||||||
|
to a :py:class:`re.RegexObject` instance for convenience.
|
||||||
|
|
||||||
|
When *patterns* is not specified, collections are formed by examining all
|
||||||
|
possible groupings of the items in *iterable* based around common numerical
|
||||||
|
components.
|
||||||
|
|
||||||
|
*minimum_items* dictates the minimum number of items a collection must have
|
||||||
|
in order to be included in the result. The default is 2, filtering out
|
||||||
|
single item collections.
|
||||||
|
|
||||||
|
If *case_sensitive* is False, then items will be treated as part of the same
|
||||||
|
collection when they only differ in casing. To avoid ambiguity, the
|
||||||
|
resulting collection will always be lowercase. For example, "item.0001.dpx"
|
||||||
|
and "Item.0002.dpx" would be part of the same collection, "item.%04d.dpx".
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Any compiled *patterns* will also respect the set case sensitivity.
|
||||||
|
|
||||||
|
Return tuple of two lists (collections, remainder) where 'collections' is a
|
||||||
|
list of assembled :py:class:`~clique.collection.Collection` instances and
|
||||||
|
'remainder' is a list of items that did not belong to any collection.
|
||||||
|
|
||||||
|
'''
|
||||||
|
collection_map = defaultdict(set)
|
||||||
|
collections = []
|
||||||
|
remainder = []
|
||||||
|
|
||||||
|
# Compile patterns.
|
||||||
|
flags = 0
|
||||||
|
if not case_sensitive:
|
||||||
|
flags |= re.IGNORECASE
|
||||||
|
|
||||||
|
compiled_patterns = []
|
||||||
|
|
||||||
|
if patterns is not None:
|
||||||
|
if not patterns:
|
||||||
|
return collections, list(iterable)
|
||||||
|
|
||||||
|
for pattern in patterns:
|
||||||
|
if isinstance(pattern, string_types):
|
||||||
|
compiled_patterns.append(re.compile(pattern, flags=flags))
|
||||||
|
else:
|
||||||
|
compiled_patterns.append(pattern)
|
||||||
|
|
||||||
|
else:
|
||||||
|
compiled_patterns.append(re.compile(DIGITS_PATTERN, flags=flags))
|
||||||
|
|
||||||
|
# Process iterable.
|
||||||
|
for item in iterable:
|
||||||
|
matched = False
|
||||||
|
|
||||||
|
for pattern in compiled_patterns:
|
||||||
|
for match in pattern.finditer(item):
|
||||||
|
index = match.group('index')
|
||||||
|
|
||||||
|
head = item[:match.start('index')]
|
||||||
|
tail = item[match.end('index'):]
|
||||||
|
|
||||||
|
if not case_sensitive:
|
||||||
|
head = head.lower()
|
||||||
|
tail = tail.lower()
|
||||||
|
|
||||||
|
padding = match.group('padding')
|
||||||
|
if padding:
|
||||||
|
padding = len(index)
|
||||||
|
else:
|
||||||
|
padding = 0
|
||||||
|
|
||||||
|
key = (head, tail, padding)
|
||||||
|
collection_map[key].add(int(index))
|
||||||
|
matched = True
|
||||||
|
|
||||||
|
if not matched:
|
||||||
|
remainder.append(item)
|
||||||
|
|
||||||
|
# Form collections.
|
||||||
|
merge_candidates = []
|
||||||
|
for (head, tail, padding), indexes in collection_map.items():
|
||||||
|
collection = Collection(head, tail, padding, indexes)
|
||||||
|
collections.append(collection)
|
||||||
|
|
||||||
|
if collection.padding == 0:
|
||||||
|
merge_candidates.append(collection)
|
||||||
|
|
||||||
|
# Merge together collections that align on padding boundaries. For example,
|
||||||
|
# 0998-0999 and 1000-1001 can be merged into 0998-1001. Note that only
|
||||||
|
# indexes within the padding width limit are merged. If a collection is
|
||||||
|
# entirely merged into another then it will not be included as a separate
|
||||||
|
# collection in the results.
|
||||||
|
fully_merged = []
|
||||||
|
for collection in collections:
|
||||||
|
if collection.padding == 0:
|
||||||
|
continue
|
||||||
|
|
||||||
|
for candidate in merge_candidates:
|
||||||
|
if (candidate.head == collection.head and
|
||||||
|
candidate.tail == collection.tail):
|
||||||
|
|
||||||
|
merged_index_count = 0
|
||||||
|
for index in candidate.indexes:
|
||||||
|
if len(str(abs(index))) == collection.padding:
|
||||||
|
collection.indexes.add(index)
|
||||||
|
merged_index_count += 1
|
||||||
|
|
||||||
|
if merged_index_count == len(candidate.indexes):
|
||||||
|
fully_merged.append(candidate)
|
||||||
|
|
||||||
|
# Filter out fully merged collections.
|
||||||
|
collections = [collection for collection in collections
|
||||||
|
if collection not in fully_merged]
|
||||||
|
|
||||||
|
# Filter out collections that do not have at least as many indexes as
|
||||||
|
# minimum_items. In addition, add any members of a filtered collection,
|
||||||
|
# which are not members of an unfiltered collection, to the remainder.
|
||||||
|
filtered = []
|
||||||
|
remainder_candidates = []
|
||||||
|
for collection in collections:
|
||||||
|
if len(collection.indexes) >= minimum_items:
|
||||||
|
filtered.append(collection)
|
||||||
|
else:
|
||||||
|
for member in collection:
|
||||||
|
remainder_candidates.append(member)
|
||||||
|
|
||||||
|
for candidate in remainder_candidates:
|
||||||
|
# Check if candidate has already been added to remainder to avoid
|
||||||
|
# duplicate entries.
|
||||||
|
if candidate in remainder:
|
||||||
|
continue
|
||||||
|
|
||||||
|
has_membership = False
|
||||||
|
|
||||||
|
for collection in filtered:
|
||||||
|
if candidate in collection:
|
||||||
|
has_membership = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not has_membership:
|
||||||
|
remainder.append(candidate)
|
||||||
|
|
||||||
|
return filtered, remainder
|
||||||
|
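For orientation, here is a minimal usage sketch of the assemble() function above. The file names are made up, and the import assumes the vendored package is importable as plain clique (in this repository it lives under pype.vendor):

import clique

items = [
    'rs_beauty.1000.png', 'rs_beauty.1001.png', 'rs_beauty.1002.png',
    'notes.txt',
]

# Group items that differ only by a numerical component.
collections, remainder = clique.assemble(items)
for collection in collections:
    print(collection)    # rs_beauty.%04d.png [1000-1002]
print(remainder)         # ['notes.txt']

# Restrict matching to the bundled frame-number pattern.
collections, _ = clique.assemble(items, patterns=[clique.PATTERNS['frames']])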
|
||||||
|
|
||||||
|
def parse(value, pattern='{head}{padding}{tail} [{ranges}]'):
|
||||||
|
'''Parse *value* into a :py:class:`~clique.collection.Collection`.
|
||||||
|
|
||||||
|
Use *pattern* to extract information from *value*. It may make use of the
|
||||||
|
following keys:
|
||||||
|
|
||||||
|
* *head* - Common leading part of the collection.
|
||||||
|
* *tail* - Common trailing part of the collection.
|
||||||
|
* *padding* - Padding value in ``%0d`` format.
|
||||||
|
* *range* - Total range in the form ``start-end``.
|
||||||
|
* *ranges* - Comma separated ranges of indexes.
|
||||||
|
* *holes* - Comma separated ranges of missing indexes.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
*holes* only makes sense if *range* or *ranges* is also present.
|
||||||
|
|
||||||
|
'''
|
||||||
|
# Construct regular expression for given pattern.
|
||||||
|
expressions = {
|
||||||
|
'head': '(?P<head>.*)',
|
||||||
|
'tail': '(?P<tail>.*)',
|
||||||
|
'padding': '%(?P<padding>\d*)d',
|
||||||
|
'range': '(?P<range>\d+-\d+)?',
|
||||||
|
'ranges': '(?P<ranges>[\d ,\-]+)?',
|
||||||
|
'holes': '(?P<holes>[\d ,\-]+)'
|
||||||
|
}
|
||||||
|
|
||||||
|
pattern_regex = re.escape(pattern)
|
||||||
|
for key, expression in expressions.items():
|
||||||
|
pattern_regex = pattern_regex.replace(
|
||||||
|
'\{{{0}\}}'.format(key),
|
||||||
|
expression
|
||||||
|
)
|
||||||
|
pattern_regex = '^{0}$'.format(pattern_regex)
|
||||||
|
|
||||||
|
# Match pattern against value and use results to construct collection.
|
||||||
|
match = re.search(pattern_regex, value)
|
||||||
|
if match is None:
|
||||||
|
raise ValueError('Value did not match pattern.')
|
||||||
|
|
||||||
|
groups = match.groupdict()
|
||||||
|
if 'padding' in groups and groups['padding']:
|
||||||
|
groups['padding'] = int(groups['padding'])
|
||||||
|
else:
|
||||||
|
groups['padding'] = 0
|
||||||
|
|
||||||
|
# Create collection and then add indexes.
|
||||||
|
collection = Collection(
|
||||||
|
groups.get('head', ''),
|
||||||
|
groups.get('tail', ''),
|
||||||
|
groups['padding']
|
||||||
|
)
|
||||||
|
|
||||||
|
if groups.get('range', None) is not None:
|
||||||
|
start, end = map(int, groups['range'].split('-'))
|
||||||
|
collection.indexes.update(range(start, end + 1))
|
||||||
|
|
||||||
|
if groups.get('ranges', None) is not None:
|
||||||
|
parts = [part.strip() for part in groups['ranges'].split(',')]
|
||||||
|
for part in parts:
|
||||||
|
index_range = list(map(int, part.split('-', 2)))
|
||||||
|
|
||||||
|
if len(index_range) > 1:
|
||||||
|
# Index range.
|
||||||
|
for index in range(index_range[0], index_range[1] + 1):
|
||||||
|
collection.indexes.add(index)
|
||||||
|
else:
|
||||||
|
# Single index.
|
||||||
|
collection.indexes.add(index_range[0])
|
||||||
|
|
||||||
|
if 'holes' in groups:
|
||||||
|
parts = [part.strip() for part in groups['holes'].split(',')]
|
||||||
|
for part in parts:
|
||||||
|
index_range = map(int, part.split('-', 2))
|
||||||
|
|
||||||
|
if len(index_range) > 1:
|
||||||
|
# Index range.
|
||||||
|
for index in range(index_range[0], index_range[1] + 1):
|
||||||
|
collection.indexes.remove(index)
|
||||||
|
else:
|
||||||
|
# Single index.
|
||||||
|
collection.indexes.remove(index_range[0])
|
||||||
|
|
||||||
|
return collection
|
||||||
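A similarly minimal sketch of split() and parse() defined above, under the same import assumption:

import clique

# split() returns a (head, index, tail) tuple for a single item.
head, index, tail = clique.split('myRender.0100.png')
# ('myRender.', '0100', '.png')

# parse() rebuilds a Collection from a formatted string.
collection = clique.parse('myRender.%04d.png [100-102]')
print(list(collection))
# ['myRender.0100.png', 'myRender.0101.png', 'myRender.0102.png']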
pype/vendor/clique/_version.py (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
__version__ = '1.3.1'
pype/vendor/clique/collection.py (vendored, new file, 383 lines)
@@ -0,0 +1,383 @@
|
||||||
|
# :coding: utf-8
|
||||||
|
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
|
||||||
|
# :license: See LICENSE.txt.
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
from . import descriptor, error, sorted_set
|
||||||
|
|
||||||
|
|
||||||
|
class Collection(object):
|
||||||
|
'''Represent group of items that differ only by numerical component.'''
|
||||||
|
|
||||||
|
indexes = descriptor.Unsettable('indexes')
|
||||||
|
|
||||||
|
def __init__(self, head, tail, padding, indexes=None):
|
||||||
|
'''Initialise collection.
|
||||||
|
|
||||||
|
*head* is the leading common part whilst *tail* is the trailing
|
||||||
|
common part.
|
||||||
|
|
||||||
|
*padding* specifies the "width" of the numerical component. An index
|
||||||
|
will be padded with zeros to fill this width. A *padding* of zero
|
||||||
|
implies no padding and width may be any size so long as no leading
|
||||||
|
zeros are present.
|
||||||
|
|
||||||
|
*indexes* can specify a set of numerical indexes to initially populate
|
||||||
|
the collection with.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
After instantiation, the ``indexes`` attribute cannot be set to a
|
||||||
|
new value using assignment::
|
||||||
|
|
||||||
|
>>> collection.indexes = [1, 2, 3]
|
||||||
|
AttributeError: Cannot set attribute defined as unsettable.
|
||||||
|
|
||||||
|
Instead, manipulate it directly::
|
||||||
|
|
||||||
|
>>> collection.indexes.clear()
|
||||||
|
>>> collection.indexes.update([1, 2, 3])
|
||||||
|
|
||||||
|
'''
|
||||||
|
super(Collection, self).__init__()
|
||||||
|
self.__dict__['indexes'] = sorted_set.SortedSet()
|
||||||
|
self._head = head
|
||||||
|
self._tail = tail
|
||||||
|
self.padding = padding
|
||||||
|
self._update_expression()
|
||||||
|
|
||||||
|
if indexes is not None:
|
||||||
|
self.indexes.update(indexes)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def head(self):
|
||||||
|
'''Return common leading part.'''
|
||||||
|
return self._head
|
||||||
|
|
||||||
|
@head.setter
|
||||||
|
def head(self, value):
|
||||||
|
'''Set common leading part to *value*.'''
|
||||||
|
self._head = value
|
||||||
|
self._update_expression()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tail(self):
|
||||||
|
'''Return common trailing part.'''
|
||||||
|
return self._tail
|
||||||
|
|
||||||
|
@tail.setter
|
||||||
|
def tail(self, value):
|
||||||
|
'''Set common trailing part to *value*.'''
|
||||||
|
self._tail = value
|
||||||
|
self._update_expression()
|
||||||
|
|
||||||
|
def _update_expression(self):
|
||||||
|
'''Update internal expression.'''
|
||||||
|
self._expression = re.compile(
|
||||||
|
'^{0}(?P<index>(?P<padding>0*)\d+?){1}$'
|
||||||
|
.format(re.escape(self.head), re.escape(self.tail))
|
||||||
|
)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
'''Return string representation.'''
|
||||||
|
return self.format()
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
'''Return representation.'''
|
||||||
|
return '<{0} "{1}">'.format(self.__class__.__name__, self)
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
'''Return iterator over items in collection.'''
|
||||||
|
for index in self.indexes:
|
||||||
|
formatted_index = '{0:0{1}d}'.format(index, self.padding)
|
||||||
|
item = '{0}{1}{2}'.format(self.head, formatted_index, self.tail)
|
||||||
|
yield item
|
||||||
|
|
||||||
|
def __contains__(self, item):
|
||||||
|
'''Return whether *item* is present in collection.'''
|
||||||
|
match = self.match(item)
|
||||||
|
if not match:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not int(match.group('index')) in self.indexes:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
'''Return whether *other* collection is equal.'''
|
||||||
|
if not isinstance(other, Collection):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return all([
|
||||||
|
other.head == self.head,
|
||||||
|
other.tail == self.tail,
|
||||||
|
other.padding == self.padding,
|
||||||
|
other.indexes == self.indexes
|
||||||
|
])
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
'''Return whether *other* collection is not equal.'''
|
||||||
|
result = self.__eq__(other)
|
||||||
|
if result is NotImplemented:
|
||||||
|
return result
|
||||||
|
|
||||||
|
return not result
|
||||||
|
|
||||||
|
def __gt__(self, other):
|
||||||
|
'''Return whether *other* collection is greater than.'''
|
||||||
|
if not isinstance(other, Collection):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
a = (self.head, self.tail, self.padding, len(self.indexes))
|
||||||
|
b = (other.head, other.tail, other.padding, len(other.indexes))
|
||||||
|
|
||||||
|
return a > b
|
||||||
|
|
||||||
|
def __lt__(self, other):
|
||||||
|
'''Return whether *other* collection is less than.'''
|
||||||
|
result = self.__gt__(other)
|
||||||
|
if result is NotImplemented:
|
||||||
|
return result
|
||||||
|
|
||||||
|
return not result
|
||||||
|
|
||||||
|
def __ge__(self, other):
|
||||||
|
'''Return whether *other* collection is greater than or equal.'''
|
||||||
|
result = self.__eq__(other)
|
||||||
|
if result is NotImplemented:
|
||||||
|
return result
|
||||||
|
|
||||||
|
if result is False:
|
||||||
|
result = self.__gt__(other)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def __le__(self, other):
|
||||||
|
'''Return whether *other* collection is less than or equal.'''
|
||||||
|
result = self.__eq__(other)
|
||||||
|
if result is NotImplemented:
|
||||||
|
return result
|
||||||
|
|
||||||
|
if result is False:
|
||||||
|
result = self.__lt__(other)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def match(self, item):
|
||||||
|
'''Return whether *item* matches this collection expression.
|
||||||
|
|
||||||
|
If a match is successful return data about the match otherwise return
|
||||||
|
None.
|
||||||
|
|
||||||
|
'''
|
||||||
|
match = self._expression.match(item)
|
||||||
|
if not match:
|
||||||
|
return None
|
||||||
|
|
||||||
|
index = match.group('index')
|
||||||
|
padded = False
|
||||||
|
if match.group('padding'):
|
||||||
|
padded = True
|
||||||
|
|
||||||
|
if self.padding == 0:
|
||||||
|
if padded:
|
||||||
|
return None
|
||||||
|
|
||||||
|
elif len(index) != self.padding:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return match
|
||||||
|
|
||||||
|
def add(self, item):
|
||||||
|
'''Add *item* to collection.
|
||||||
|
|
||||||
|
raise :py:class:`~error.CollectionError` if *item* cannot be
|
||||||
|
added to the collection.
|
||||||
|
|
||||||
|
'''
|
||||||
|
match = self.match(item)
|
||||||
|
if match is None:
|
||||||
|
raise error.CollectionError(
|
||||||
|
'Item does not match collection expression.'
|
||||||
|
)
|
||||||
|
|
||||||
|
self.indexes.add(int(match.group('index')))
|
||||||
|
|
||||||
|
def remove(self, item):
|
||||||
|
'''Remove *item* from collection.
|
||||||
|
|
||||||
|
raise :py:class:`~error.CollectionError` if *item* cannot be
|
||||||
|
removed from the collection.
|
||||||
|
|
||||||
|
'''
|
||||||
|
match = self.match(item)
|
||||||
|
if match is None:
|
||||||
|
raise error.CollectionError(
|
||||||
|
'Item not present in collection.'
|
||||||
|
)
|
||||||
|
|
||||||
|
index = int(match.group('index'))
|
||||||
|
try:
|
||||||
|
self.indexes.remove(index)
|
||||||
|
except KeyError:
|
||||||
|
raise error.CollectionError(
|
||||||
|
'Item not present in collection.'
|
||||||
|
)
|
||||||
|
|
||||||
|
def format(self, pattern='{head}{padding}{tail} [{ranges}]'):
|
||||||
|
'''Return string representation as specified by *pattern*.
|
||||||
|
|
||||||
|
Pattern can be any format accepted by Python's standard format function
|
||||||
|
and will receive the following keyword arguments as context:
|
||||||
|
|
||||||
|
* *head* - Common leading part of the collection.
|
||||||
|
* *tail* - Common trailing part of the collection.
|
||||||
|
* *padding* - Padding value in ``%0d`` format.
|
||||||
|
* *range* - Total range in the form ``start-end``
|
||||||
|
* *ranges* - Comma separated ranges of indexes.
|
||||||
|
* *holes* - Comma separated ranges of missing indexes.
|
||||||
|
|
||||||
|
'''
|
||||||
|
data = {}
|
||||||
|
data['head'] = self.head
|
||||||
|
data['tail'] = self.tail
|
||||||
|
|
||||||
|
if self.padding:
|
||||||
|
data['padding'] = '%0{0}d'.format(self.padding)
|
||||||
|
else:
|
||||||
|
data['padding'] = '%d'
|
||||||
|
|
||||||
|
if '{holes}' in pattern:
|
||||||
|
data['holes'] = self.holes().format('{ranges}')
|
||||||
|
|
||||||
|
if '{range}' in pattern or '{ranges}' in pattern:
|
||||||
|
indexes = list(self.indexes)
|
||||||
|
indexes_count = len(indexes)
|
||||||
|
|
||||||
|
if indexes_count == 0:
|
||||||
|
data['range'] = ''
|
||||||
|
|
||||||
|
elif indexes_count == 1:
|
||||||
|
data['range'] = '{0}'.format(indexes[0])
|
||||||
|
|
||||||
|
else:
|
||||||
|
data['range'] = '{0}-{1}'.format(
|
||||||
|
indexes[0], indexes[-1]
|
||||||
|
)
|
||||||
|
|
||||||
|
if '{ranges}' in pattern:
|
||||||
|
separated = self.separate()
|
||||||
|
if len(separated) > 1:
|
||||||
|
ranges = [collection.format('{range}')
|
||||||
|
for collection in separated]
|
||||||
|
|
||||||
|
else:
|
||||||
|
ranges = [data['range']]
|
||||||
|
|
||||||
|
data['ranges'] = ', '.join(ranges)
|
||||||
|
|
||||||
|
return pattern.format(**data)
|
||||||
|
|
||||||
|
def is_contiguous(self):
|
||||||
|
'''Return whether entire collection is contiguous.'''
|
||||||
|
previous = None
|
||||||
|
for index in self.indexes:
|
||||||
|
if previous is None:
|
||||||
|
previous = index
|
||||||
|
continue
|
||||||
|
|
||||||
|
if index != (previous + 1):
|
||||||
|
return False
|
||||||
|
|
||||||
|
previous = index
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def holes(self):
|
||||||
|
'''Return holes in collection.
|
||||||
|
|
||||||
|
Return :py:class:`~clique.collection.Collection` of missing indexes.
|
||||||
|
|
||||||
|
'''
|
||||||
|
missing = set([])
|
||||||
|
previous = None
|
||||||
|
for index in self.indexes:
|
||||||
|
if previous is None:
|
||||||
|
previous = index
|
||||||
|
continue
|
||||||
|
|
||||||
|
if index != (previous + 1):
|
||||||
|
missing.update(range(previous + 1, index))
|
||||||
|
|
||||||
|
previous = index
|
||||||
|
|
||||||
|
return Collection(self.head, self.tail, self.padding, indexes=missing)
|
||||||
|
|
||||||
|
def is_compatible(self, collection):
|
||||||
|
'''Return whether *collection* is compatible with this collection.
|
||||||
|
|
||||||
|
To be compatible *collection* must have the same head, tail and padding
|
||||||
|
properties as this collection.
|
||||||
|
|
||||||
|
'''
|
||||||
|
return all([
|
||||||
|
isinstance(collection, Collection),
|
||||||
|
collection.head == self.head,
|
||||||
|
collection.tail == self.tail,
|
||||||
|
collection.padding == self.padding
|
||||||
|
])
|
||||||
|
|
||||||
|
def merge(self, collection):
|
||||||
|
'''Merge *collection* into this collection.
|
||||||
|
|
||||||
|
If the *collection* is compatible with this collection then update
|
||||||
|
indexes with all indexes in *collection*.
|
||||||
|
|
||||||
|
raise :py:class:`~error.CollectionError` if *collection* is not
|
||||||
|
compatible with this collection.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if not self.is_compatible(collection):
|
||||||
|
raise error.CollectionError('Collection is not compatible '
|
||||||
|
'with this collection.')
|
||||||
|
|
||||||
|
self.indexes.update(collection.indexes)
|
||||||
|
|
||||||
|
def separate(self):
|
||||||
|
'''Return contiguous parts of collection as separate collections.
|
||||||
|
|
||||||
|
Return as list of :py:class:`~clique.collection.Collection` instances.
|
||||||
|
|
||||||
|
'''
|
||||||
|
collections = []
|
||||||
|
start = None
|
||||||
|
end = None
|
||||||
|
|
||||||
|
for index in self.indexes:
|
||||||
|
if start is None:
|
||||||
|
start = index
|
||||||
|
end = start
|
||||||
|
continue
|
||||||
|
|
||||||
|
if index != (end + 1):
|
||||||
|
collections.append(
|
||||||
|
Collection(self.head, self.tail, self.padding,
|
||||||
|
indexes=set(range(start, end + 1)))
|
||||||
|
)
|
||||||
|
start = index
|
||||||
|
|
||||||
|
end = index
|
||||||
|
|
||||||
|
if start is None:
|
||||||
|
collections.append(
|
||||||
|
Collection(self.head, self.tail, self.padding)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
collections.append(
|
||||||
|
Collection(self.head, self.tail, self.padding,
|
||||||
|
indexes=range(start, end + 1))
|
||||||
|
)
|
||||||
|
|
||||||
|
return collections
|
||||||
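A brief, illustrative sketch of working with the Collection class above; the file names are hypothetical and the import assumes the vendored clique package is on the path:

from clique.collection import Collection

# Frames 1-3 and 5 of a hypothetical render, padded to four digits.
collection = Collection('shot_010.', '.exr', 4, indexes={1, 2, 3, 5})

print(collection.format())           # shot_010.%04d.exr [1-3, 5]
print(collection.is_contiguous())    # False
print(collection.holes().indexes)    # [4]

collection.add('shot_010.0004.exr')  # index is parsed from the item
print(collection.is_contiguous())    # True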
pype/vendor/clique/descriptor.py (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.


class Unsettable(object):
    '''Prevent standard setting of property.

    Example::

        >>> class Foo(object):
        ...
        ...     x = Unsettable('x')
        ...
        ...     def __init__(self):
        ...         self.__dict__['x'] = True
        ...
        >>> foo = Foo()
        >>> print foo.x
        True
        >>> foo.x = False
        AttributeError: Cannot set attribute defined as unsettable.

    '''

    def __init__(self, label):
        '''Initialise descriptor with property *label*.

        *label* should match the name of the property being described::

            x = Unsettable('x')

        '''
        self.label = label
        super(Unsettable, self).__init__()

    def __get__(self, instance, owner):
        '''Return value of property for *instance*.'''
        return instance.__dict__.get(self.label)

    def __set__(self, instance, value):
        '''Set *value* for *instance* property.'''
        raise AttributeError('Cannot set attribute defined as unsettable.')
pype/vendor/clique/error.py (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

'''Custom error classes.'''


class CollectionError(Exception):
    '''Raise when a collection error occurs.'''
pype/vendor/clique/sorted_set.py (vendored, new file, 62 lines)
@@ -0,0 +1,62 @@
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

import collections
import bisect


class SortedSet(collections.MutableSet):
    '''Maintain sorted collection of unique items.'''

    def __init__(self, iterable=None):
        '''Initialise with items from *iterable*.'''
        super(SortedSet, self).__init__()
        self._members = []
        if iterable:
            self.update(iterable)

    def __str__(self):
        '''Return string representation.'''
        return str(self._members)

    def __repr__(self):
        '''Return representation.'''
        return '<{0} "{1}">'.format(self.__class__.__name__, self)

    def __contains__(self, item):
        '''Return whether *item* is present.'''
        return self._index(item) >= 0

    def __len__(self):
        '''Return number of items.'''
        return len(self._members)

    def __iter__(self):
        '''Return iterator over items.'''
        return iter(self._members)

    def add(self, item):
        '''Add *item*.'''
        if not item in self:
            index = bisect.bisect_right(self._members, item)
            self._members.insert(index, item)

    def discard(self, item):
        '''Remove *item*.'''
        index = self._index(item)
        if index >= 0:
            del self._members[index]

    def update(self, iterable):
        '''Update items with those from *iterable*.'''
        for item in iterable:
            self.add(item)

    def _index(self, item):
        '''Return index of *item* in member list or -1 if not present.'''
        index = bisect.bisect_left(self._members, item)
        if index != len(self) and self._members[index] == item:
            return index

        return -1
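For reference, a tiny sketch of the SortedSet behaviour (same import assumption as above):

from clique.sorted_set import SortedSet

indexes = SortedSet([5, 1, 3])
indexes.update([2, 3])      # duplicates are ignored
print(indexes)              # [1, 2, 3, 5]
print(3 in indexes)         # True
indexes.discard(5)
print(list(indexes))        # [1, 2, 3]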
pype/vendor/ftrack_action_handler/__init__.py (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
# :coding: utf-8
# :copyright: Copyright (c) 2017 ftrack

from ._version import __version__
pype/vendor/ftrack_action_handler/_version.py (vendored, new file, 1 line)
@@ -0,0 +1 @@
__version__ = '0.1.3'
pype/vendor/ftrack_action_handler/action.py (vendored, new file, 233 lines)
@@ -0,0 +1,233 @@
|
||||||
|
# :coding: utf-8
|
||||||
|
# :copyright: Copyright (c) 2017 ftrack
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import ftrack_api
|
||||||
|
|
||||||
|
|
||||||
|
class BaseAction(object):
|
||||||
|
'''Custom Action base class
|
||||||
|
|
||||||
|
`label` a descriptive string identifying your action.
|
||||||
|
|
||||||
|
`variant` To group actions together, give them the same
|
||||||
|
label and specify a unique variant per action.
|
||||||
|
|
||||||
|
`identifier` a unique identifier for your action.
|
||||||
|
|
||||||
|
`description` a verbose descriptive text for your action
|
||||||
|
|
||||||
|
'''
|
||||||
|
label = None
|
||||||
|
variant = None
|
||||||
|
identifier = None
|
||||||
|
description = None
|
||||||
|
|
||||||
|
def __init__(self, session):
|
||||||
|
'''Expects an ftrack_api.Session instance.'''
|
||||||
|
|
||||||
|
self.logger = logging.getLogger(
|
||||||
|
'{0}.{1}'.format(__name__, self.__class__.__name__)
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.label is None:
|
||||||
|
raise ValueError(
|
||||||
|
'Action missing label.'
|
||||||
|
)
|
||||||
|
|
||||||
|
elif self.identifier is None:
|
||||||
|
raise ValueError(
|
||||||
|
'Action missing identifier.'
|
||||||
|
)
|
||||||
|
|
||||||
|
self._session = session
|
||||||
|
|
||||||
|
@property
|
||||||
|
def session(self):
|
||||||
|
'''Return current session.'''
|
||||||
|
return self._session
|
||||||
|
|
||||||
|
def register(self):
|
||||||
|
'''Register the action, subscribing to the discover and launch topics.'''
|
||||||
|
self.session.event_hub.subscribe(
|
||||||
|
'topic=ftrack.action.discover', self._discover
|
||||||
|
)
|
||||||
|
|
||||||
|
self.session.event_hub.subscribe(
|
||||||
|
'topic=ftrack.action.launch and data.actionIdentifier={0}'.format(
|
||||||
|
self.identifier
|
||||||
|
),
|
||||||
|
self._launch
|
||||||
|
)
|
||||||
|
|
||||||
|
def _discover(self, event):
|
||||||
|
args = self._translate_event(
|
||||||
|
self.session, event
|
||||||
|
)
|
||||||
|
|
||||||
|
accepts = self.discover(
|
||||||
|
self.session, *args
|
||||||
|
)
|
||||||
|
|
||||||
|
if accepts:
|
||||||
|
return {
|
||||||
|
'items': [{
|
||||||
|
'label': self.label,
|
||||||
|
'variant': self.variant,
|
||||||
|
'description': self.description,
|
||||||
|
'actionIdentifier': self.identifier,
|
||||||
|
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
|
||||||
|
def discover(self, session, entities, event):
|
||||||
|
'''Return true if we can handle the selected entities.
|
||||||
|
|
||||||
|
*session* is a `ftrack_api.Session` instance
|
||||||
|
|
||||||
|
|
||||||
|
*entities* is a list of tuples each containing the entity type and the entity id.
|
||||||
|
If the entity is hierarchical you will always get the entity
|
||||||
|
type TypedContext, once retrieved through a get operation you
|
||||||
|
will have the "real" entity type, e.g. Shot, Sequence
|
||||||
|
or Asset Build.
|
||||||
|
|
||||||
|
*event* the unmodified original event
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _translate_event(self, session, event):
|
||||||
|
'''Return *event* translated structure to be used with the API.'''
|
||||||
|
|
||||||
|
_selection = event['data'].get('selection', [])
|
||||||
|
|
||||||
|
_entities = list()
|
||||||
|
for entity in _selection:
|
||||||
|
_entities.append(
|
||||||
|
(
|
||||||
|
self._get_entity_type(entity), entity.get('entityId')
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return [
|
||||||
|
_entities,
|
||||||
|
event
|
||||||
|
]
|
||||||
|
|
||||||
|
def _get_entity_type(self, entity):
|
||||||
|
'''Return translated entity type that can be used with the API.'''
|
||||||
|
# Get entity type and make sure it is lower cased. Most places except
|
||||||
|
# the component tab in the Sidebar will use lower case notation.
|
||||||
|
entity_type = entity.get('entityType').replace('_', '').lower()
|
||||||
|
|
||||||
|
for schema in self.session.schemas:
|
||||||
|
alias_for = schema.get('alias_for')
|
||||||
|
|
||||||
|
if (
|
||||||
|
alias_for and isinstance(alias_for, str) and
|
||||||
|
alias_for.lower() == entity_type
|
||||||
|
):
|
||||||
|
return schema['id']
|
||||||
|
|
||||||
|
for schema in self.session.schemas:
|
||||||
|
if schema['id'].lower() == entity_type:
|
||||||
|
return schema['id']
|
||||||
|
|
||||||
|
raise ValueError(
|
||||||
|
'Unable to translate entity type: {0}.'.format(entity_type)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _launch(self, event):
|
||||||
|
args = self._translate_event(
|
||||||
|
self.session, event
|
||||||
|
)
|
||||||
|
|
||||||
|
interface = self._interface(
|
||||||
|
self.session, *args
|
||||||
|
)
|
||||||
|
|
||||||
|
if interface:
|
||||||
|
return interface
|
||||||
|
|
||||||
|
response = self.launch(
|
||||||
|
self.session, *args
|
||||||
|
)
|
||||||
|
|
||||||
|
return self._handle_result(
|
||||||
|
self.session, response, *args
|
||||||
|
)
|
||||||
|
|
||||||
|
def launch(self, session, entities, event):
|
||||||
|
'''Callback method for the custom action.
|
||||||
|
|
||||||
|
Return either a bool (True if successful or False if the action failed)
|
||||||
|
or a dictionary with the keys `message` and `success`; the message should be a
|
||||||
|
string and will be displayed as feedback to the user, success should be a bool,
|
||||||
|
True if successful or False if the action failed.
|
||||||
|
|
||||||
|
*session* is a `ftrack_api.Session` instance
|
||||||
|
|
||||||
|
*entities* is a list of tuples each containing the entity type and the entity id.
|
||||||
|
If the entity is hierarchical you will always get the entity
|
||||||
|
type TypedContext, once retrieved through a get operation you
|
||||||
|
will have the "real" entity type, e.g. Shot, Sequence
|
||||||
|
or Asset Build.
|
||||||
|
|
||||||
|
*event* the unmodified original event
|
||||||
|
|
||||||
|
'''
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def _interface(self, *args):
|
||||||
|
interface = self.interface(*args)
|
||||||
|
|
||||||
|
if interface:
|
||||||
|
return {
|
||||||
|
'items': interface
|
||||||
|
}
|
||||||
|
|
||||||
|
def interface(self, session, entities, event):
|
||||||
|
'''Return an interface if applicable or None
|
||||||
|
|
||||||
|
*session* is a `ftrack_api.Session` instance
|
||||||
|
|
||||||
|
*entities* is a list of tuples each containing the entity type and the entity id.
|
||||||
|
If the entity is hierarchical you will always get the entity
|
||||||
|
type TypedContext, once retrieved through a get operation you
|
||||||
|
will have the "real" entity type, e.g. Shot, Sequence
|
||||||
|
or Asset Build.
|
||||||
|
|
||||||
|
*event* the unmodified original event
|
||||||
|
'''
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _handle_result(self, session, result, entities, event):
|
||||||
|
'''Validate the returned result from the action callback'''
|
||||||
|
if isinstance(result, bool):
|
||||||
|
result = {
|
||||||
|
'success': result,
|
||||||
|
'message': (
|
||||||
|
'{0} launched successfully.'.format(
|
||||||
|
self.label
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
elif isinstance(result, dict):
|
||||||
|
for key in ('success', 'message'):
|
||||||
|
if key in result:
|
||||||
|
continue
|
||||||
|
|
||||||
|
raise KeyError(
|
||||||
|
'Missing required key: {0}.'.format(key)
|
||||||
|
)
|
||||||
|
|
||||||
|
else:
|
||||||
|
self.logger.error(
|
||||||
|
'Invalid result type must be bool or dictionary!'
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
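To show how BaseAction is intended to be used, here is a hedged sketch of a minimal action. The class name, identifier and messages are invented for illustration, and the import path assumes the vendored location added by this commit:

import ftrack_api

from pype.vendor.ftrack_action_handler.action import BaseAction


class HelloAction(BaseAction):
    '''Example action that reports the current selection.'''

    label = 'Hello'
    variant = 'Example'
    identifier = 'com.example.hello'
    description = 'Log the selected entities.'

    def discover(self, session, entities, event):
        # Offer the action for any non-empty selection.
        return bool(entities)

    def launch(self, session, entities, event):
        for entity_type, entity_id in entities:
            self.logger.info('Selected %s (%s)', entity_type, entity_id)
        return {'success': True, 'message': 'Hello from the example action.'}


if __name__ == '__main__':
    # Credentials are read from the FTRACK_* environment variables.
    session = ftrack_api.Session(auto_connect_event_hub=True)
    HelloAction(session).register()
    session.event_hub.wait()  # block and process incoming events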
pype/vendor/six.py (vendored, new file, 868 lines)
@@ -0,0 +1,868 @@
|
||||||
|
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||||
|
|
||||||
|
# Copyright (c) 2010-2015 Benjamin Peterson
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in all
|
||||||
|
# copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
# SOFTWARE.
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import itertools
|
||||||
|
import operator
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||||
|
__version__ = "1.10.0"
|
||||||
|
|
||||||
|
|
||||||
|
# Useful for very coarse version differentiation.
|
||||||
|
PY2 = sys.version_info[0] == 2
|
||||||
|
PY3 = sys.version_info[0] == 3
|
||||||
|
PY34 = sys.version_info[0:2] >= (3, 4)
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
string_types = str,
|
||||||
|
integer_types = int,
|
||||||
|
class_types = type,
|
||||||
|
text_type = str
|
||||||
|
binary_type = bytes
|
||||||
|
|
||||||
|
MAXSIZE = sys.maxsize
|
||||||
|
else:
|
||||||
|
string_types = basestring,
|
||||||
|
integer_types = (int, long)
|
||||||
|
class_types = (type, types.ClassType)
|
||||||
|
text_type = unicode
|
||||||
|
binary_type = str
|
||||||
|
|
||||||
|
if sys.platform.startswith("java"):
|
||||||
|
# Jython always uses 32 bits.
|
||||||
|
MAXSIZE = int((1 << 31) - 1)
|
||||||
|
else:
|
||||||
|
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||||
|
class X(object):
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return 1 << 31
|
||||||
|
try:
|
||||||
|
len(X())
|
||||||
|
except OverflowError:
|
||||||
|
# 32-bit
|
||||||
|
MAXSIZE = int((1 << 31) - 1)
|
||||||
|
else:
|
||||||
|
# 64-bit
|
||||||
|
MAXSIZE = int((1 << 63) - 1)
|
||||||
|
del X
|
||||||
|
|
||||||
|
|
||||||
|
def _add_doc(func, doc):
|
||||||
|
"""Add documentation to a function."""
|
||||||
|
func.__doc__ = doc
|
||||||
|
|
||||||
|
|
||||||
|
def _import_module(name):
|
||||||
|
"""Import module, returning the module after the last dot."""
|
||||||
|
__import__(name)
|
||||||
|
return sys.modules[name]
|
||||||
|
|
||||||
|
|
||||||
|
class _LazyDescr(object):
|
||||||
|
|
||||||
|
def __init__(self, name):
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def __get__(self, obj, tp):
|
||||||
|
result = self._resolve()
|
||||||
|
setattr(obj, self.name, result) # Invokes __set__.
|
||||||
|
try:
|
||||||
|
# This is a bit ugly, but it avoids running this again by
|
||||||
|
# removing this descriptor.
|
||||||
|
delattr(obj.__class__, self.name)
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class MovedModule(_LazyDescr):
|
||||||
|
|
||||||
|
def __init__(self, name, old, new=None):
|
||||||
|
super(MovedModule, self).__init__(name)
|
||||||
|
if PY3:
|
||||||
|
if new is None:
|
||||||
|
new = name
|
||||||
|
self.mod = new
|
||||||
|
else:
|
||||||
|
self.mod = old
|
||||||
|
|
||||||
|
def _resolve(self):
|
||||||
|
return _import_module(self.mod)
|
||||||
|
|
||||||
|
def __getattr__(self, attr):
|
||||||
|
_module = self._resolve()
|
||||||
|
value = getattr(_module, attr)
|
||||||
|
setattr(self, attr, value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class _LazyModule(types.ModuleType):
|
||||||
|
|
||||||
|
def __init__(self, name):
|
||||||
|
super(_LazyModule, self).__init__(name)
|
||||||
|
self.__doc__ = self.__class__.__doc__
|
||||||
|
|
||||||
|
def __dir__(self):
|
||||||
|
attrs = ["__doc__", "__name__"]
|
||||||
|
attrs += [attr.name for attr in self._moved_attributes]
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
# Subclasses should override this
|
||||||
|
_moved_attributes = []
|
||||||
|
|
||||||
|
|
||||||
|
class MovedAttribute(_LazyDescr):
|
||||||
|
|
||||||
|
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||||
|
super(MovedAttribute, self).__init__(name)
|
||||||
|
if PY3:
|
||||||
|
if new_mod is None:
|
||||||
|
new_mod = name
|
||||||
|
self.mod = new_mod
|
||||||
|
if new_attr is None:
|
||||||
|
if old_attr is None:
|
||||||
|
new_attr = name
|
||||||
|
else:
|
||||||
|
new_attr = old_attr
|
||||||
|
self.attr = new_attr
|
||||||
|
else:
|
||||||
|
self.mod = old_mod
|
||||||
|
if old_attr is None:
|
||||||
|
old_attr = name
|
||||||
|
self.attr = old_attr
|
||||||
|
|
||||||
|
def _resolve(self):
|
||||||
|
module = _import_module(self.mod)
|
||||||
|
return getattr(module, self.attr)
|
||||||
|
|
||||||
|
|
||||||
|
class _SixMetaPathImporter(object):
|
||||||
|
|
||||||
|
"""
|
||||||
|
A meta path importer to import six.moves and its submodules.
|
||||||
|
|
||||||
|
This class implements a PEP302 finder and loader. It should be compatible
|
||||||
|
with Python 2.5 and all existing versions of Python3
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, six_module_name):
|
||||||
|
self.name = six_module_name
|
||||||
|
self.known_modules = {}
|
||||||
|
|
||||||
|
def _add_module(self, mod, *fullnames):
|
||||||
|
for fullname in fullnames:
|
||||||
|
self.known_modules[self.name + "." + fullname] = mod
|
||||||
|
|
||||||
|
def _get_module(self, fullname):
|
||||||
|
return self.known_modules[self.name + "." + fullname]
|
||||||
|
|
||||||
|
def find_module(self, fullname, path=None):
|
||||||
|
if fullname in self.known_modules:
|
||||||
|
return self
|
||||||
|
return None
|
||||||
|
|
||||||
|
def __get_module(self, fullname):
|
||||||
|
try:
|
||||||
|
return self.known_modules[fullname]
|
||||||
|
except KeyError:
|
||||||
|
raise ImportError("This loader does not know module " + fullname)
|
||||||
|
|
||||||
|
def load_module(self, fullname):
|
||||||
|
try:
|
||||||
|
# in case of a reload
|
||||||
|
return sys.modules[fullname]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
mod = self.__get_module(fullname)
|
||||||
|
if isinstance(mod, MovedModule):
|
||||||
|
mod = mod._resolve()
|
||||||
|
else:
|
||||||
|
mod.__loader__ = self
|
||||||
|
sys.modules[fullname] = mod
|
||||||
|
return mod
|
||||||
|
|
||||||
|
def is_package(self, fullname):
|
||||||
|
"""
|
||||||
|
Return true, if the named module is a package.
|
||||||
|
|
||||||
|
We need this method to get correct spec objects with
|
||||||
|
Python 3.4 (see PEP451)
|
||||||
|
"""
|
||||||
|
return hasattr(self.__get_module(fullname), "__path__")
|
||||||
|
|
||||||
|
def get_code(self, fullname):
|
||||||
|
"""Return None
|
||||||
|
|
||||||
|
Required, if is_package is implemented"""
|
||||||
|
self.__get_module(fullname) # eventually raises ImportError
|
||||||
|
return None
|
||||||
|
get_source = get_code # same as get_code
|
||||||
|
|
||||||
|
_importer = _SixMetaPathImporter(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class _MovedItems(_LazyModule):
|
||||||
|
|
||||||
|
"""Lazy loading of moved objects"""
|
||||||
|
__path__ = [] # mark as package
|
||||||
|
|
||||||
|
|
||||||
|
_moved_attributes = [
|
||||||
|
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||||
|
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||||
|
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||||
|
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||||
|
MovedAttribute("intern", "__builtin__", "sys"),
|
||||||
|
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||||
|
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
|
||||||
|
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
|
||||||
|
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||||
|
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
|
||||||
|
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||||
|
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
|
||||||
|
MovedAttribute("StringIO", "StringIO", "io"),
|
||||||
|
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||||
|
MovedAttribute("UserList", "UserList", "collections"),
|
||||||
|
MovedAttribute("UserString", "UserString", "collections"),
|
||||||
|
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||||
|
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||||
|
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||||
|
MovedModule("builtins", "__builtin__"),
|
||||||
|
MovedModule("configparser", "ConfigParser"),
|
||||||
|
MovedModule("copyreg", "copy_reg"),
|
||||||
|
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||||
|
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||||
|
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||||
|
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||||
|
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||||
|
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||||
|
MovedModule("http_client", "httplib", "http.client"),
|
||||||
|
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||||
|
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
|
||||||
|
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||||
|
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||||
|
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||||
|
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||||
|
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||||
|
MovedModule("cPickle", "cPickle", "pickle"),
|
||||||
|
MovedModule("queue", "Queue"),
|
||||||
|
MovedModule("reprlib", "repr"),
|
||||||
|
MovedModule("socketserver", "SocketServer"),
|
||||||
|
MovedModule("_thread", "thread", "_thread"),
|
||||||
|
MovedModule("tkinter", "Tkinter"),
|
||||||
|
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||||
|
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||||
|
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||||
|
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||||
|
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||||
|
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
|
||||||
|
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||||
|
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||||
|
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||||
|
"tkinter.colorchooser"),
|
||||||
|
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||||
|
"tkinter.commondialog"),
|
||||||
|
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||||
|
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||||
|
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||||
|
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||||
|
"tkinter.simpledialog"),
|
||||||
|
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
|
||||||
|
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
|
||||||
|
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||||
|
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||||
|
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||||
|
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||||
|
]
|
||||||
|
# Add windows specific modules.
|
||||||
|
if sys.platform == "win32":
|
||||||
|
_moved_attributes += [
|
||||||
|
MovedModule("winreg", "_winreg"),
|
||||||
|
]
|
||||||
|
|
||||||
|
for attr in _moved_attributes:
|
||||||
|
setattr(_MovedItems, attr.name, attr)
|
||||||
|
if isinstance(attr, MovedModule):
|
||||||
|
_importer._add_module(attr, "moves." + attr.name)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
_MovedItems._moved_attributes = _moved_attributes
|
||||||
|
|
||||||
|
moves = _MovedItems(__name__ + ".moves")
|
||||||
|
_importer._add_module(moves, "moves")
|
||||||
|
|
||||||
|
|
||||||
|
class Module_six_moves_urllib_parse(_LazyModule):
|
||||||
|
|
||||||
|
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||||
|
|
||||||
|
|
||||||
|
_urllib_parse_moved_attributes = [
|
||||||
|
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("quote", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("splittag", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("splituser", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
|
||||||
|
]
|
||||||
|
for attr in _urllib_parse_moved_attributes:
|
||||||
|
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||||
|
|
||||||
|
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||||
|
"moves.urllib_parse", "moves.urllib.parse")
|
||||||
|
|
||||||
|
|
||||||
|
class Module_six_moves_urllib_error(_LazyModule):
|
||||||
|
|
||||||
|
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||||
|
|
||||||
|
|
||||||
|
_urllib_error_moved_attributes = [
|
||||||
|
MovedAttribute("URLError", "urllib2", "urllib.error"),
|
||||||
|
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
|
||||||
|
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
|
||||||
|
]
|
||||||
|
for attr in _urllib_error_moved_attributes:
|
||||||
|
setattr(Module_six_moves_urllib_error, attr.name, attr)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||||
|
|
||||||
|
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||||
|
"moves.urllib_error", "moves.urllib.error")


class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""


_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")


class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""


_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")


class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""


_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")


class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")
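

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). Client code consumes the namespace registered above as a regular
# package; this assumes the module is importable under the canonical name
# ``six``, which may differ for this vendored copy. The URL is a placeholder.
def _example_six_moves_urllib():
    from six.moves.urllib import parse, request  # assumes the canonical name

    query = parse.urlencode({"q": "ayon"})
    return request.Request("https://example.com/?" + query).get_full_url()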


def add_move(move):
    """Add an item to six.moves."""
    setattr(_MovedItems, move.name, move)


def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
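

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). ``mock`` is just an example of a module that moved between
# Python 2 and 3; any MovedModule/MovedAttribute instance works the same way.
def _example_add_remove_move():
    add_move(MovedModule("mock", "mock", "unittest.mock"))
    # six.moves.mock now resolves to the third-party ``mock`` on Python 2
    # (if installed) and to ``unittest.mock`` on Python 3.
    remove_move("mock")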


if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"


try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:
    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
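

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). The helpers above hide the im_func/__func__ and func_code/__code__
# renames between Python 2 and 3; "Greeter" is a hypothetical class.
def _example_function_introspection():
    class Greeter(object):
        def greet(self):
            return "hello"

    func = get_unbound_function(Greeter.greet)
    code = get_function_code(func)
    bound = create_bound_method(func, Greeter())
    return code.co_name, bound()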


if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")
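

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). iteritems() avoids building an intermediate list on Python 2 while
# mapping directly onto dict.items() on Python 3; the dictionary is example data.
def _example_dict_iteration():
    settings = {"host": "localhost", "port": 3306}
    return sorted("%s=%s" % (key, value) for key, value in iteritems(settings))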


if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    _assertCountEqual = "assertCountEqual"
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
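

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). b() and u() let a single source file spell byte and text literals
# that behave the same on Python 2 and 3; the literal values are example data.
def _example_literals():
    header = b("PK\x03\x04")   # bytes on both major versions
    label = u("caf\u00e9")     # text on both major versions
    return len(header), label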


def assertCountEqual(self, *args, **kwargs):
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    return getattr(self, _assertRegex)(*args, **kwargs)
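

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). The wrappers above are called with a TestCase instance as their
# first argument so the correct unittest method name is used per Python
# version; the test data here is arbitrary.
def _example_assert_helpers():
    import unittest

    class ExampleTest(unittest.TestCase):
        def test_count_equal(self):
            assertCountEqual(self, [1, 2, 3], [3, 2, 1])

        def test_regex(self):
            assertRegex(self, "ayon-core 1.0", r"\d+\.\d+")

    return ExampleTest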


if PY3:
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")


if sys.version_info[:2] == (3, 2):
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        raise value


print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")
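

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). reraise() re-raises an exception with its original traceback on
# both Python 2 and 3; the KeyError here is just an example trigger.
def _example_reraise():
    try:
        {}["missing"]
    except KeyError:
        exc_type, exc_value, exc_tb = sys.exc_info()
        reraise(exc_type, exc_value, exc_tb)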


if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):

        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})
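

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). "RegistryMeta" and "Plugin" are hypothetical names; with_metaclass()
# lets the same class statement work under both Python 2 and 3 metaclass syntax.
def _example_with_metaclass():
    class RegistryMeta(type):
        registry = []

        def __new__(mcs, name, bases, namespace):
            cls = super(RegistryMeta, mcs).__new__(mcs, name, bases, namespace)
            mcs.registry.append(cls)
            return cls

    class Plugin(with_metaclass(RegistryMeta, object)):
        pass

    # Only Plugin ends up registered; the temporary base class is discarded.
    return RegistryMeta.registry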


def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        slots = orig_vars.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, orig_vars)
    return wrapper
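

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). add_metaclass() is the decorator-based alternative to
# with_metaclass(); "SingletonMeta" and "Config" are hypothetical names.
def _example_add_metaclass():
    class SingletonMeta(type):
        _instances = {}

        def __call__(cls, *args, **kwargs):
            if cls not in cls._instances:
                cls._instances[cls] = super(SingletonMeta, cls).__call__(
                    *args, **kwargs)
            return cls._instances[cls]

    @add_metaclass(SingletonMeta)
    class Config(object):
        pass

    return Config() is Config()  # True: the decorated class is a singleton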


def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if PY2:
        if '__str__' not in klass.__dict__:
            raise ValueError("@python_2_unicode_compatible cannot be applied "
                             "to %s because it doesn't define __str__()." %
                             klass.__name__)
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
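

# Illustrative usage sketch (added for clarity; not part of the upstream six
# source). "Task" is a hypothetical class that only defines __str__ returning
# text; on Python 2 the decorator adds __unicode__ and a UTF-8 encoding __str__.
def _example_python_2_unicode_compatible():
    @python_2_unicode_compatible
    class Task(object):
        def __init__(self, name):
            self.name = name

        def __str__(self):
            return u("Task: ") + self.name

    return str(Task(u("compositing")))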


# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)